import warnings
warnings.filterwarnings('ignore')
import os
import cv2
import random
import shutil
import warnings
import numpy as np
%matplotlib inline
import pandas as pd
import seaborn as sns
import tensorflow as tf
from tensorflow import keras
from tqdm.notebook import tqdm
import matplotlib.pyplot as plt
warnings.filterwarnings("ignore")
from keras.optimizers import Adam
from tensorflow.keras import Model
from keras.models import Sequential
from tensorflow.keras.preprocessing.image import load_img
from tensorflow.keras.utils import to_categorical
from keras.callbacks import EarlyStopping,ModelCheckpoint
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from keras.layers import Flatten , Dense , Conv2D, Dropout , MaxPooling2D
from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, AveragePooling2D, Concatenate, Flatten, Dense
# Source tree (raw images) and target tree (preprocessed train/test/validation splits).
Dataset_path = 'Student-engagement-dataset'
Target_path = 'Student-engagement-Dataset'

# create train/test/validation dirs
Train_dir = os.path.join(Target_path, 'train')
valid_dir = os.path.join(Target_path, 'validation')
Test_dir = os.path.join(Target_path, 'test')
for _dir in (Target_path, Train_dir, valid_dir, Test_dir):
    os.makedirs(_dir, exist_ok=True)

img_size = (256, 256)  # every image is resized to this (width, height)

# Accumulators filled by the preprocessing walk below.
image_paths = []
labels = []
preprocced_image_paths = []
preprocced_image_paths_test = []
# Walk the raw tree (engagement level -> emotion sub-class -> images), resize
# each image to img_size, and write it into the test or train split of the
# target tree.  NOTE(review): the first ~20% of each class in os.listdir()
# order goes to test -- deterministic directory order, NOT a random split.
for path in os.listdir(Dataset_path) : #label = eng , diseng
    main_classes_dir = os.path.join(Dataset_path,path) #print(main_classes_dir)
    for main_path in os.listdir(main_classes_dir): #print labels=fursted,bored,drowsy,confused,..
        sub_classes_dir = os.path.join(main_classes_dir,main_path)
        img_count = len(os.listdir(sub_classes_dir))
        test_img_count = int(0.2*img_count)            # 20% of this class -> test
        train_img_count = img_count - test_img_count   # remainder -> train (value unused below)
        target_train_dir = os.path.join(Train_dir,main_path)
        target_test_dir = os.path.join(Test_dir,main_path)
        target_validation_dir = os.path.join(valid_dir,main_path)
        os.makedirs(target_train_dir,exist_ok =True) #files with labels
        os.makedirs(target_test_dir,exist_ok=True)
        # NOTE(review): validation folders are created but never written to,
        # so the validation split stays empty.
        os.makedirs(target_validation_dir,exist_ok=True)
        for sub_main_path in os.listdir(sub_classes_dir):
            image_path = os.path.join(sub_classes_dir,sub_main_path)
            image_paths.append(image_path)
            labels.append(main_path)
            # Resize only; normalization is deferred to ImageDataGenerator(rescale=...)
            img = cv2.imread(image_path)
            img = cv2.resize(img,img_size)
            # img = img / 255
            # Split train,test images: fill the test folder until it holds
            # test_img_count files, then route the rest to train.
            # NOTE(review): os.listdir() is re-run for every image -- O(n^2) overall.
            if len(os.listdir(target_test_dir)) != test_img_count:
                cv2.imwrite(os.path.join(target_test_dir, sub_main_path), img)
                preprocced_image_paths.append(os.path.join(target_test_dir,sub_main_path))
                # preprocced_image_paths_test.append(os.path.join(target_test_dir,sub_main_path))
            else:
                cv2.imwrite(os.path.join(target_train_dir,sub_main_path),img )
                preprocced_image_paths.append(os.path.join(target_train_dir,sub_main_path))
print(image_paths[0:2])  # sanity check: first two collected raw-image paths
['Student-engagement-dataset\\Engaged\\confused\\0020.jpg', 'Student-engagement-dataset\\Engaged\\confused\\0021.jpg']
# Recompute the overall 80/20 split sizes from every collected image path.
img_count = len(image_paths)
test_img_count = int(img_count * 0.2)
train_img_count = img_count - test_img_count
print("img_count", img_count)
print("test_no", test_img_count)
print("train_img_no", train_img_count)
img_count 4240 test_no 848 train_img_no 3392
# Pair each label with its raw and preprocessed path, then shuffle all rows
# (sample(frac=1) is a full random permutation) and reset the index.
dataset = pd.DataFrame({
    "label": labels,
    "image": image_paths,
    'preproccesd_image': preprocced_image_paths,
})
dataset = dataset.sample(frac=1).reset_index(drop=True)
print(dataset["image"][3])
print(dataset['preproccesd_image'][3])
dataset.head()
Student-engagement-dataset\train\engaged\0294.jpg Student-engagement-Dataset\test\engaged\0294.jpg
| label | image | preproccesd_image | |
|---|---|---|---|
| 0 | bored | Student-engagement-dataset\Not engaged\bored\0... | Student-engagement-Dataset\train\bored\0199.jpg |
| 1 | engaged | Student-engagement-dataset\Engaged\engaged\014... | Student-engagement-Dataset\test\engaged\0149.jpg |
| 2 | frustrated | Student-engagement-dataset\train\frustrated\00... | Student-engagement-Dataset\test\frustrated\009... |
| 3 | engaged | Student-engagement-dataset\train\engaged\0294.jpg | Student-engagement-Dataset\test\engaged\0294.jpg |
| 4 | engaged | Student-engagement-dataset\train\engaged\0271.jpg | Student-engagement-Dataset\test\engaged\0271.jpg |
# Class balance of the shuffled dataset, one bar per sub-class label.
sns.countplot(x=dataset["label"]);
plt.title("Student Engagment Dataset") ;  # NOTE(review): "Engagment" typo appears in the rendered title
# Display a 3x3 grid of sample images from the RAW dataset with their labels.
from PIL import Image
plt.figure(figsize=(20,20))
dataset_ = pd.DataFrame(zip(dataset['label'] , dataset['image']))  # col 0 = label, col 1 = raw path
files=dataset_.iloc[0:9]  # first nine rows (already shuffled above)
for index,label,file_path in files.itertuples():
    #print(index,file_path,label)
    plt.subplot(3,3,index+1)  # itertuples yields (index, col0, col1)
    img=load_img(file_path)
    img=np.array(img)
    plt.imshow(img)
    plt.title(label,fontsize=30)
    plt.axis('off')
input_shape = [256,256,3]  # (height, width, channels) expected by the CNNs below
# find min_shape
def min_shape(image_paths):
    """Return the smallest extent seen along each image axis.

    Scans every image in *image_paths* and returns a 3-element list
    [min_height, min_width, min_channels]; the first two entries start
    at np.inf and the channel entry at 3, and each is lowered whenever
    a smaller extent is encountered.  Grayscale (2-D) images simply
    leave the channel entry untouched, as in the original loop.

    Fixes: the local accumulator used to shadow the function's own name
    (`min_shape`), and Image.open handles were never closed -- the
    context manager releases each file promptly.
    """
    smallest = [np.inf, np.inf, 3]
    for path in image_paths:
        with Image.open(path) as img_file:
            arr = np.array(img_file)
        for axis, extent in enumerate(arr.shape):
            if extent <= smallest[axis]:
                smallest[axis] = extent
    return smallest
# display sample from data after preproccing
# Shows nine PREPROCESSED (resized) images with their labels.
plt.figure(figsize=(20,20))
# NOTE(review): this REBINDS `dataset` to a two-column frame
# (0 = label, 1 = preprocessed path), discarding the original columns.
dataset = pd.DataFrame(zip(dataset['label'] , dataset['preproccesd_image']))
files=dataset.iloc[0:9]
# img=Image.open(img_path)
# plt.imshow(img);
# min_shape= [np.inf,np.inf,3]
for index,label,file_path in files.itertuples():
    #print(index,file_path,label)
    plt.subplot(3,3,index+1)  # itertuples yields (index, label, path)
    img=load_img(file_path)
    img=np.array(img)
    plt.imshow(img)
    plt.title(label,fontsize=30)
    plt.axis('off')
#number of data in target_test_dir (explore equivlent of data in test part )
# Count preprocessed test images per class and plot their distribution.
image_paths = []   # NOTE(review): clobbers the module-level image_paths built earlier
labels_ = []
test_dir_path = "Student-engagement-Dataset/test"
for file in os.listdir(test_dir_path) :
    # NOTE(review): `labels` is a directory path string here, shadowing the
    # earlier module-level label list of the same name.
    labels = os.path.join(test_dir_path,file)
    print(file)
    for img_name in os.listdir(labels) :
        img_path = os.path.join(labels,img_name)
        image_paths.append(img_path)
        labels_.append(file)  # one label entry per image file
print(len(labels_))
sns.countplot(x=labels_) ;
bored confused drowsy engaged frustrated Looking Away 2120
#train_dir_path = "/kaggle/working/Student-engagement-Dataset/train"
# Same per-class count/plot as above, but for the preprocessed train split.
image_paths=[]
labels_=[]
for file in os.listdir(Train_dir) :
    labels = os.path.join(Train_dir,file)  # NOTE(review): path string shadows the earlier `labels` list
    print(file)
    for img_name in os.listdir(labels) :
        img_path = os.path.join(labels,img_name)
        image_paths.append(img_path)
        labels_.append(file)  # one label entry per image file
print(len(labels_))
sns.countplot(x=labels_) ;
bored confused drowsy engaged frustrated Looking Away 1699
from tensorflow.keras.applications import MobileNetV2, Xception, NASNetMobile

# BUG FIX: these paths previously pointed at 'Student-engagement-dataset'
# (lowercase 'd') -- the RAW source tree, which has no train/test/validation
# subfolders.  That only worked on case-insensitive filesystems (Windows),
# where it silently resolved to 'Student-engagement-Dataset'.  Reuse the
# directory variables the split was actually written into.
train_path = Train_dir
test_path = Test_dir
val_path = valid_dir
IMAGE_SIZE = [256, 256]
# Scaling all the images between 0 to 1, plus light shear/zoom augmentation.
# NOTE(review): the same augmenting generator feeds test/val too; evaluation
# data would normally get rescale only -- confirm this is intended.
datagen = ImageDataGenerator(rescale = 1./255, shear_range=0.2, zoom_range=0.2, horizontal_flip=False)
train_set = datagen.flow_from_directory(train_path,
                                        target_size=(256,256),
                                        batch_size=2,
                                        class_mode = 'categorical')
test_set = datagen.flow_from_directory(test_path,
                                       target_size=(256,256),
                                       batch_size=2,
                                       class_mode='categorical')
# NOTE(review): the split loop never writes into validation/, so this
# generator reports 0 images.
val_set = datagen.flow_from_directory(val_path,
                                      target_size=(256,256),
                                      batch_size=2,
                                      class_mode='categorical')
Found 1699 images belonging to 6 classes. Found 2120 images belonging to 6 classes. Found 0 images belonging to 6 classes.
from keras import backend as K
def recall_m(y_true, y_pred):
    """Batch recall = TP / (TP + FN), with epsilon guarding divide-by-zero."""
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    actual_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    return tp / (actual_positives + K.epsilon())
def precision_m(y_true, y_pred):
    """Batch precision = TP / (TP + FP), with epsilon guarding divide-by-zero."""
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_pos = K.sum(K.round(K.clip(y_pred, 0, 1)))
    return tp / (predicted_pos + K.epsilon())
def f1_m(y_true, y_pred):
    """Batch F1 score: harmonic mean of precision_m and recall_m."""
    p = precision_m(y_true, y_pred)
    r = recall_m(y_true, y_pred)
    return 2 * ((p * r) / (p + r + K.epsilon()))
from keras.metrics import Precision, Recall
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
from tensorflow.keras.models import Sequential, Model
# Shrink the LR by 0.3x after 3 epochs without val_accuracy improvement; floor at 1e-7.
learning_rate_reduction = ReduceLROnPlateau(
    monitor="val_accuracy", patience=3, verbose=1, factor=0.3, min_lr=0.0000001
)
# Stop after 10 epochs without a >= 0.001 val_accuracy gain, restoring the best weights.
early_stop = EarlyStopping(
    patience=10,
    verbose=1,
    monitor="val_accuracy",
    mode="max",
    min_delta=0.001,
    restore_best_weights=True,
)
# Global accumulators: one entry per evaluated model, kept in lockstep.
ML_Model = []
accuracy = []
precision = []
recall = []
f1score = []

#function to call for storing the results
def storeResults(model, a,b,c,d):
    """Append one model's name and its accuracy/precision/recall/F1
    (each rounded to 3 decimal places) to the global result lists."""
    ML_Model.append(model)
    for bucket, score in ((accuracy, a), (precision, b), (recall, c), (f1score, d)):
        bucket.append(round(score, 3))
# NOTE(review): duplicate of the imports/callbacks cell above.  It rebinds
# early_stop with patience=20 (was 10).  Neither callback is actually used:
# the callbacks argument in model1.fit below is commented out.
from keras.metrics import Precision, Recall
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
from tensorflow.keras.models import Sequential, Model
learning_rate_reduction = ReduceLROnPlateau(
    monitor="val_accuracy", patience=3, verbose=1, factor=0.3, min_lr=0.0000001
)
early_stop = EarlyStopping(
    patience=20,
    verbose=1,
    monitor="val_accuracy",
    mode="max",
    min_delta=0.001,
    restore_best_weights=True,
)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, Dense, MaxPool2D, Dropout, Flatten
from tensorflow.keras.optimizers import Adam

# LeNet-style baseline CNN: two conv/pool stages, then three dense layers
# ending in a 6-way softmax (one unit per engagement sub-class).
model1 = Sequential([
    Conv2D(filters=32, kernel_size=(5, 5), padding='same', activation='relu',
           input_shape=(256, 256, 3)),
    MaxPool2D(strides=2),
    Conv2D(filters=48, kernel_size=(5, 5), padding='valid', activation='relu'),
    MaxPool2D(strides=2),
    Flatten(),
    Dense(256, activation='relu'),
    Dense(84, activation='relu'),
    Dense(6, activation='softmax'),
])
model1.compile(loss = 'categorical_crossentropy', optimizer='adam', metrics=["accuracy",f1_m,precision_m, recall_m])
model1.build()
model1.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_2 (Conv2D) (None, 256, 256, 32) 2432
max_pooling2d_2 (MaxPooling (None, 128, 128, 32) 0
2D)
conv2d_3 (Conv2D) (None, 124, 124, 48) 38448
max_pooling2d_3 (MaxPooling (None, 62, 62, 48) 0
2D)
flatten_1 (Flatten) (None, 184512) 0
dense_3 (Dense) (None, 256) 47235328
dense_4 (Dense) (None, 84) 21588
dense_5 (Dense) (None, 6) 510
=================================================================
Total params: 47,298,306
Trainable params: 47,298,306
Non-trainable params: 0
_________________________________________________________________
# Train for 50 fixed epochs.  NOTE(review): test_set doubles as validation
# data here (the real validation split is empty), so every "val_*" metric is
# a test-set metric and the test set is NOT held out.  The LR-reduction and
# early-stop callbacks are disabled (commented out).
hist1 = model1.fit(train_set, validation_data=test_set, epochs=50, steps_per_epoch=len(train_set), validation_steps=len(test_set))#,callbacks=[learning_rate_reduction, early_stop])
Epoch 1/50 850/850 [==============================] - 41s 48ms/step - loss: 0.1050 - accuracy: 0.9535 - f1_m: 0.9529 - precision_m: 0.9541 - recall_m: 0.9524 - val_loss: 0.2004 - val_accuracy: 0.9349 - val_f1_m: 0.9352 - val_precision_m: 0.9358 - val_recall_m: 0.9349 Epoch 2/50 850/850 [==============================] - 41s 48ms/step - loss: 0.2904 - accuracy: 0.9459 - f1_m: 0.9447 - precision_m: 0.9471 - recall_m: 0.9435 - val_loss: 0.2183 - val_accuracy: 0.9080 - val_f1_m: 0.9085 - val_precision_m: 0.9104 - val_recall_m: 0.9075 Epoch 3/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0994 - accuracy: 0.9523 - f1_m: 0.9520 - precision_m: 0.9524 - recall_m: 0.9518 - val_loss: 0.2195 - val_accuracy: 0.9274 - val_f1_m: 0.9267 - val_precision_m: 0.9311 - val_recall_m: 0.9245 Epoch 4/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0998 - accuracy: 0.9523 - f1_m: 0.9520 - precision_m: 0.9524 - recall_m: 0.9518 - val_loss: 0.2057 - val_accuracy: 0.9392 - val_f1_m: 0.9388 - val_precision_m: 0.9392 - val_recall_m: 0.9387 Epoch 5/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0922 - accuracy: 0.9529 - f1_m: 0.9529 - precision_m: 0.9529 - recall_m: 0.9529 - val_loss: 0.1902 - val_accuracy: 0.9406 - val_f1_m: 0.9395 - val_precision_m: 0.9401 - val_recall_m: 0.9392 Epoch 6/50 850/850 [==============================] - 41s 48ms/step - loss: 0.1025 - accuracy: 0.9523 - f1_m: 0.9512 - precision_m: 0.9524 - recall_m: 0.9506 - val_loss: 0.2654 - val_accuracy: 0.9363 - val_f1_m: 0.9362 - val_precision_m: 0.9368 - val_recall_m: 0.9358 Epoch 7/50 850/850 [==============================] - 41s 48ms/step - loss: 0.1509 - accuracy: 0.9417 - f1_m: 0.9369 - precision_m: 0.9412 - recall_m: 0.9347 - val_loss: 0.5266 - val_accuracy: 0.9245 - val_f1_m: 0.9245 - val_precision_m: 0.9245 - val_recall_m: 0.9245 Epoch 8/50 850/850 [==============================] - 41s 48ms/step - loss: 0.1040 - accuracy: 0.9547 - f1_m: 0.9547 - 
precision_m: 0.9547 - recall_m: 0.9547 - val_loss: 0.3382 - val_accuracy: 0.9255 - val_f1_m: 0.9256 - val_precision_m: 0.9259 - val_recall_m: 0.9255 Epoch 9/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0866 - accuracy: 0.9541 - f1_m: 0.9541 - precision_m: 0.9541 - recall_m: 0.9541 - val_loss: 0.2331 - val_accuracy: 0.9396 - val_f1_m: 0.9385 - val_precision_m: 0.9410 - val_recall_m: 0.9373 Epoch 10/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0861 - accuracy: 0.9576 - f1_m: 0.9578 - precision_m: 0.9582 - recall_m: 0.9576 - val_loss: 0.4892 - val_accuracy: 0.9349 - val_f1_m: 0.9349 - val_precision_m: 0.9349 - val_recall_m: 0.9349 Epoch 11/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0890 - accuracy: 0.9559 - f1_m: 0.9551 - precision_m: 0.9559 - recall_m: 0.9547 - val_loss: 0.2885 - val_accuracy: 0.9321 - val_f1_m: 0.9321 - val_precision_m: 0.9321 - val_recall_m: 0.9321 Epoch 12/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0786 - accuracy: 0.9588 - f1_m: 0.9588 - precision_m: 0.9588 - recall_m: 0.9588 - val_loss: 0.3907 - val_accuracy: 0.9358 - val_f1_m: 0.9360 - val_precision_m: 0.9363 - val_recall_m: 0.9358 Epoch 13/50 850/850 [==============================] - 42s 50ms/step - loss: 0.1588 - accuracy: 0.9470 - f1_m: 0.9467 - precision_m: 0.9482 - recall_m: 0.9459 - val_loss: 0.5348 - val_accuracy: 0.9269 - val_f1_m: 0.9266 - val_precision_m: 0.9269 - val_recall_m: 0.9264 Epoch 14/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0849 - accuracy: 0.9547 - f1_m: 0.9549 - precision_m: 0.9553 - recall_m: 0.9547 - val_loss: 0.3724 - val_accuracy: 0.9406 - val_f1_m: 0.9406 - val_precision_m: 0.9406 - val_recall_m: 0.9406 Epoch 15/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0847 - accuracy: 0.9594 - f1_m: 0.9594 - precision_m: 0.9594 - recall_m: 0.9594 - val_loss: 0.3419 - val_accuracy: 0.9358 - val_f1_m: 0.9360 - 
val_precision_m: 0.9363 - val_recall_m: 0.9358 Epoch 16/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0784 - accuracy: 0.9588 - f1_m: 0.9584 - precision_m: 0.9588 - recall_m: 0.9582 - val_loss: 0.2996 - val_accuracy: 0.9363 - val_f1_m: 0.9360 - val_precision_m: 0.9363 - val_recall_m: 0.9358 Epoch 17/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0950 - accuracy: 0.9600 - f1_m: 0.9600 - precision_m: 0.9600 - recall_m: 0.9600 - val_loss: 0.3783 - val_accuracy: 0.9344 - val_f1_m: 0.9344 - val_precision_m: 0.9344 - val_recall_m: 0.9344 Epoch 18/50 850/850 [==============================] - 41s 48ms/step - loss: 0.1130 - accuracy: 0.9506 - f1_m: 0.9490 - precision_m: 0.9506 - recall_m: 0.9482 - val_loss: 0.3399 - val_accuracy: 0.9193 - val_f1_m: 0.9193 - val_precision_m: 0.9193 - val_recall_m: 0.9193 Epoch 19/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0700 - accuracy: 0.9617 - f1_m: 0.9620 - precision_m: 0.9624 - recall_m: 0.9618 - val_loss: 0.4413 - val_accuracy: 0.9373 - val_f1_m: 0.9373 - val_precision_m: 0.9373 - val_recall_m: 0.9373 Epoch 20/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0765 - accuracy: 0.9588 - f1_m: 0.9580 - precision_m: 0.9588 - recall_m: 0.9576 - val_loss: 0.3877 - val_accuracy: 0.9382 - val_f1_m: 0.9382 - val_precision_m: 0.9382 - val_recall_m: 0.9382 Epoch 21/50 850/850 [==============================] - 42s 49ms/step - loss: 0.1167 - accuracy: 0.9511 - f1_m: 0.9506 - precision_m: 0.9518 - recall_m: 0.9500 - val_loss: 0.3268 - val_accuracy: 0.9410 - val_f1_m: 0.9410 - val_precision_m: 0.9410 - val_recall_m: 0.9410 Epoch 22/50 850/850 [==============================] - 43s 50ms/step - loss: 0.1508 - accuracy: 0.9612 - f1_m: 0.9612 - precision_m: 0.9612 - recall_m: 0.9612 - val_loss: 0.5842 - val_accuracy: 0.9354 - val_f1_m: 0.9351 - val_precision_m: 0.9354 - val_recall_m: 0.9349 Epoch 23/50 850/850 [==============================] - 41s 49ms/step 
- loss: 0.0898 - accuracy: 0.9582 - f1_m: 0.9582 - precision_m: 0.9582 - recall_m: 0.9582 - val_loss: 0.4224 - val_accuracy: 0.9387 - val_f1_m: 0.9385 - val_precision_m: 0.9392 - val_recall_m: 0.9382 Epoch 24/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0701 - accuracy: 0.9617 - f1_m: 0.9618 - precision_m: 0.9618 - recall_m: 0.9618 - val_loss: 0.4128 - val_accuracy: 0.9434 - val_f1_m: 0.9434 - val_precision_m: 0.9434 - val_recall_m: 0.9434 Epoch 25/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0824 - accuracy: 0.9600 - f1_m: 0.9600 - precision_m: 0.9600 - recall_m: 0.9600 - val_loss: 0.4479 - val_accuracy: 0.9392 - val_f1_m: 0.9390 - val_precision_m: 0.9396 - val_recall_m: 0.9387 Epoch 26/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0734 - accuracy: 0.9635 - f1_m: 0.9635 - precision_m: 0.9635 - recall_m: 0.9635 - val_loss: 0.3730 - val_accuracy: 0.9406 - val_f1_m: 0.9406 - val_precision_m: 0.9406 - val_recall_m: 0.9406 Epoch 27/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0797 - accuracy: 0.9629 - f1_m: 0.9629 - precision_m: 0.9629 - recall_m: 0.9629 - val_loss: 0.2907 - val_accuracy: 0.9358 - val_f1_m: 0.9358 - val_precision_m: 0.9358 - val_recall_m: 0.9358 Epoch 28/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0700 - accuracy: 0.9600 - f1_m: 0.9600 - precision_m: 0.9600 - recall_m: 0.9600 - val_loss: 0.3718 - val_accuracy: 0.9377 - val_f1_m: 0.9371 - val_precision_m: 0.9387 - val_recall_m: 0.9363 Epoch 29/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0929 - accuracy: 0.9606 - f1_m: 0.9606 - precision_m: 0.9606 - recall_m: 0.9606 - val_loss: 0.2783 - val_accuracy: 0.9420 - val_f1_m: 0.9420 - val_precision_m: 0.9420 - val_recall_m: 0.9420 Epoch 30/50 850/850 [==============================] - 41s 48ms/step - loss: 0.1378 - accuracy: 0.9629 - f1_m: 0.9629 - precision_m: 0.9629 - recall_m: 0.9629 - val_loss: 0.2962 - 
val_accuracy: 0.9439 - val_f1_m: 0.9439 - val_precision_m: 0.9439 - val_recall_m: 0.9439 Epoch 31/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0693 - accuracy: 0.9676 - f1_m: 0.9676 - precision_m: 0.9676 - recall_m: 0.9676 - val_loss: 0.3495 - val_accuracy: 0.9401 - val_f1_m: 0.9404 - val_precision_m: 0.9410 - val_recall_m: 0.9401 Epoch 32/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0690 - accuracy: 0.9641 - f1_m: 0.9637 - precision_m: 0.9641 - recall_m: 0.9635 - val_loss: 0.2703 - val_accuracy: 0.9458 - val_f1_m: 0.9458 - val_precision_m: 0.9458 - val_recall_m: 0.9458 Epoch 33/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0673 - accuracy: 0.9659 - f1_m: 0.9659 - precision_m: 0.9659 - recall_m: 0.9659 - val_loss: 0.2388 - val_accuracy: 0.9481 - val_f1_m: 0.9483 - val_precision_m: 0.9486 - val_recall_m: 0.9481 Epoch 34/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0685 - accuracy: 0.9653 - f1_m: 0.9653 - precision_m: 0.9653 - recall_m: 0.9653 - val_loss: 0.2414 - val_accuracy: 0.9443 - val_f1_m: 0.9443 - val_precision_m: 0.9443 - val_recall_m: 0.9443 Epoch 35/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0674 - accuracy: 0.9682 - f1_m: 0.9682 - precision_m: 0.9682 - recall_m: 0.9682 - val_loss: 0.2110 - val_accuracy: 0.9486 - val_f1_m: 0.9480 - val_precision_m: 0.9486 - val_recall_m: 0.9476 Epoch 36/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0841 - accuracy: 0.9623 - f1_m: 0.9625 - precision_m: 0.9629 - recall_m: 0.9624 - val_loss: 0.3320 - val_accuracy: 0.9429 - val_f1_m: 0.9429 - val_precision_m: 0.9429 - val_recall_m: 0.9429 Epoch 37/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0675 - accuracy: 0.9653 - f1_m: 0.9653 - precision_m: 0.9653 - recall_m: 0.9653 - val_loss: 0.3258 - val_accuracy: 0.9434 - val_f1_m: 0.9434 - val_precision_m: 0.9434 - val_recall_m: 0.9434 Epoch 38/50 850/850 
[==============================] - 42s 49ms/step - loss: 0.0673 - accuracy: 0.9688 - f1_m: 0.9688 - precision_m: 0.9688 - recall_m: 0.9688 - val_loss: 0.2556 - val_accuracy: 0.9429 - val_f1_m: 0.9429 - val_precision_m: 0.9429 - val_recall_m: 0.9429 Epoch 39/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0715 - accuracy: 0.9653 - f1_m: 0.9653 - precision_m: 0.9653 - recall_m: 0.9653 - val_loss: 0.1994 - val_accuracy: 0.9491 - val_f1_m: 0.9491 - val_precision_m: 0.9491 - val_recall_m: 0.9491 Epoch 40/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0652 - accuracy: 0.9676 - f1_m: 0.9676 - precision_m: 0.9676 - recall_m: 0.9676 - val_loss: 0.2159 - val_accuracy: 0.9524 - val_f1_m: 0.9524 - val_precision_m: 0.9524 - val_recall_m: 0.9524 Epoch 41/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0653 - accuracy: 0.9670 - f1_m: 0.9671 - precision_m: 0.9671 - recall_m: 0.9671 - val_loss: 0.3885 - val_accuracy: 0.9491 - val_f1_m: 0.9491 - val_precision_m: 0.9491 - val_recall_m: 0.9491 Epoch 42/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0686 - accuracy: 0.9641 - f1_m: 0.9641 - precision_m: 0.9641 - recall_m: 0.9641 - val_loss: 0.2559 - val_accuracy: 0.9467 - val_f1_m: 0.9467 - val_precision_m: 0.9467 - val_recall_m: 0.9467 Epoch 43/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0661 - accuracy: 0.9665 - f1_m: 0.9665 - precision_m: 0.9665 - recall_m: 0.9665 - val_loss: 0.2107 - val_accuracy: 0.9439 - val_f1_m: 0.9439 - val_precision_m: 0.9439 - val_recall_m: 0.9439 Epoch 44/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0979 - accuracy: 0.9617 - f1_m: 0.9618 - precision_m: 0.9618 - recall_m: 0.9618 - val_loss: 0.6290 - val_accuracy: 0.9250 - val_f1_m: 0.9253 - val_precision_m: 0.9259 - val_recall_m: 0.9250 Epoch 45/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0653 - accuracy: 0.9688 - f1_m: 0.9688 - precision_m: 
0.9688 - recall_m: 0.9688 - val_loss: 0.5441 - val_accuracy: 0.9415 - val_f1_m: 0.9415 - val_precision_m: 0.9415 - val_recall_m: 0.9415 Epoch 46/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0612 - accuracy: 0.9694 - f1_m: 0.9694 - precision_m: 0.9694 - recall_m: 0.9694 - val_loss: 0.4942 - val_accuracy: 0.9401 - val_f1_m: 0.9401 - val_precision_m: 0.9401 - val_recall_m: 0.9401 Epoch 47/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0632 - accuracy: 0.9670 - f1_m: 0.9671 - precision_m: 0.9671 - recall_m: 0.9671 - val_loss: 0.3943 - val_accuracy: 0.9382 - val_f1_m: 0.9382 - val_precision_m: 0.9382 - val_recall_m: 0.9382 Epoch 48/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0609 - accuracy: 0.9712 - f1_m: 0.9712 - precision_m: 0.9712 - recall_m: 0.9712 - val_loss: 0.4886 - val_accuracy: 0.9415 - val_f1_m: 0.9415 - val_precision_m: 0.9415 - val_recall_m: 0.9415 Epoch 49/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0588 - accuracy: 0.9712 - f1_m: 0.9706 - precision_m: 0.9706 - recall_m: 0.9706 - val_loss: 0.5307 - val_accuracy: 0.9302 - val_f1_m: 0.9302 - val_precision_m: 0.9302 - val_recall_m: 0.9302 Epoch 50/50 850/850 [==============================] - 41s 48ms/step - loss: 0.0581 - accuracy: 0.9694 - f1_m: 0.9694 - precision_m: 0.9694 - recall_m: 0.9694 - val_loss: 0.4255 - val_accuracy: 0.9462 - val_f1_m: 0.9464 - val_precision_m: 0.9467 - val_recall_m: 0.9462
# Record the CNN's final-epoch validation metrics in the global result lists.
# FIX: index with -1 (last epoch) instead of the hard-coded 49, so this still
# works if the epoch count changes or early stopping halts training sooner.
dl_acc = hist1.history["val_accuracy"][-1]
dl_prec = hist1.history["val_precision_m"][-1]
dl_rec = hist1.history["val_recall_m"][-1]
dl_f1 = hist1.history["val_f1_m"][-1]
storeResults('CNN', dl_acc, dl_prec, dl_rec, dl_f1)
import matplotlib.pyplot as plt
# Side-by-side loss and accuracy curves for the model1 training run above.
x=hist1
plt.figure(figsize=(20,10))
plt.subplot(1, 2, 1)
plt.suptitle('Optimizer : adam', fontsize=10)
plt.ylabel('Loss', fontsize=16)
plt.plot(x.history['loss'], label='Training Loss')
plt.plot(x.history['val_loss'], label='Validation Loss')  # "validation" is really the test set here
plt.legend(loc='upper right')
plt.subplot(1, 2, 2)
plt.ylabel('Accuracy', fontsize=16)
plt.plot(x.history['accuracy'], label='Training Accuracy')
plt.plot(x.history['val_accuracy'], label='Validation Accuracy')
plt.legend(loc='lower right')
plt.show()
# MobileNetV2 backbone with a Flatten + 6-way softmax head.
# NOTE(review): weights=None means it trains from scratch -- this is NOT
# transfer learning from ImageNet.
base_model = MobileNetV2(input_shape = IMAGE_SIZE + [3], weights=None, include_top=False)
x1= Flatten()(base_model.output)
prediction1 = Dense(6, activation='softmax')(x1)
model4 = Model(inputs = base_model.inputs, outputs = prediction1)
model4.summary()  # summary is printed before compile; compile only attaches loss/metrics
model4.compile(loss = 'categorical_crossentropy', optimizer='sgd', metrics=["accuracy",f1_m,precision_m, recall_m])
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 256, 256, 3 0 []
)]
Conv1 (Conv2D) (None, 128, 128, 32 864 ['input_1[0][0]']
)
bn_Conv1 (BatchNormalization) (None, 128, 128, 32 128 ['Conv1[0][0]']
)
Conv1_relu (ReLU) (None, 128, 128, 32 0 ['bn_Conv1[0][0]']
)
expanded_conv_depthwise (Depth (None, 128, 128, 32 288 ['Conv1_relu[0][0]']
wiseConv2D) )
expanded_conv_depthwise_BN (Ba (None, 128, 128, 32 128 ['expanded_conv_depthwise[0][0]']
tchNormalization) )
expanded_conv_depthwise_relu ( (None, 128, 128, 32 0 ['expanded_conv_depthwise_BN[0][0
ReLU) ) ]']
expanded_conv_project (Conv2D) (None, 128, 128, 16 512 ['expanded_conv_depthwise_relu[0]
) [0]']
expanded_conv_project_BN (Batc (None, 128, 128, 16 64 ['expanded_conv_project[0][0]']
hNormalization) )
block_1_expand (Conv2D) (None, 128, 128, 96 1536 ['expanded_conv_project_BN[0][0]'
) ]
block_1_expand_BN (BatchNormal (None, 128, 128, 96 384 ['block_1_expand[0][0]']
ization) )
block_1_expand_relu (ReLU) (None, 128, 128, 96 0 ['block_1_expand_BN[0][0]']
)
block_1_pad (ZeroPadding2D) (None, 129, 129, 96 0 ['block_1_expand_relu[0][0]']
)
block_1_depthwise (DepthwiseCo (None, 64, 64, 96) 864 ['block_1_pad[0][0]']
nv2D)
block_1_depthwise_BN (BatchNor (None, 64, 64, 96) 384 ['block_1_depthwise[0][0]']
malization)
block_1_depthwise_relu (ReLU) (None, 64, 64, 96) 0 ['block_1_depthwise_BN[0][0]']
block_1_project (Conv2D) (None, 64, 64, 24) 2304 ['block_1_depthwise_relu[0][0]']
block_1_project_BN (BatchNorma (None, 64, 64, 24) 96 ['block_1_project[0][0]']
lization)
block_2_expand (Conv2D) (None, 64, 64, 144) 3456 ['block_1_project_BN[0][0]']
block_2_expand_BN (BatchNormal (None, 64, 64, 144) 576 ['block_2_expand[0][0]']
ization)
block_2_expand_relu (ReLU) (None, 64, 64, 144) 0 ['block_2_expand_BN[0][0]']
block_2_depthwise (DepthwiseCo (None, 64, 64, 144) 1296 ['block_2_expand_relu[0][0]']
nv2D)
block_2_depthwise_BN (BatchNor (None, 64, 64, 144) 576 ['block_2_depthwise[0][0]']
malization)
block_2_depthwise_relu (ReLU) (None, 64, 64, 144) 0 ['block_2_depthwise_BN[0][0]']
block_2_project (Conv2D) (None, 64, 64, 24) 3456 ['block_2_depthwise_relu[0][0]']
block_2_project_BN (BatchNorma (None, 64, 64, 24) 96 ['block_2_project[0][0]']
lization)
block_2_add (Add) (None, 64, 64, 24) 0 ['block_1_project_BN[0][0]',
'block_2_project_BN[0][0]']
block_3_expand (Conv2D) (None, 64, 64, 144) 3456 ['block_2_add[0][0]']
block_3_expand_BN (BatchNormal (None, 64, 64, 144) 576 ['block_3_expand[0][0]']
ization)
block_3_expand_relu (ReLU) (None, 64, 64, 144) 0 ['block_3_expand_BN[0][0]']
block_3_pad (ZeroPadding2D) (None, 65, 65, 144) 0 ['block_3_expand_relu[0][0]']
block_3_depthwise (DepthwiseCo (None, 32, 32, 144) 1296 ['block_3_pad[0][0]']
nv2D)
block_3_depthwise_BN (BatchNor (None, 32, 32, 144) 576 ['block_3_depthwise[0][0]']
malization)
block_3_depthwise_relu (ReLU) (None, 32, 32, 144) 0 ['block_3_depthwise_BN[0][0]']
block_3_project (Conv2D) (None, 32, 32, 32) 4608 ['block_3_depthwise_relu[0][0]']
block_3_project_BN (BatchNorma (None, 32, 32, 32) 128 ['block_3_project[0][0]']
lization)
block_4_expand (Conv2D) (None, 32, 32, 192) 6144 ['block_3_project_BN[0][0]']
block_4_expand_BN (BatchNormal (None, 32, 32, 192) 768 ['block_4_expand[0][0]']
ization)
block_4_expand_relu (ReLU) (None, 32, 32, 192) 0 ['block_4_expand_BN[0][0]']
block_4_depthwise (DepthwiseCo (None, 32, 32, 192) 1728 ['block_4_expand_relu[0][0]']
nv2D)
block_4_depthwise_BN (BatchNor (None, 32, 32, 192) 768 ['block_4_depthwise[0][0]']
malization)
block_4_depthwise_relu (ReLU) (None, 32, 32, 192) 0 ['block_4_depthwise_BN[0][0]']
block_4_project (Conv2D) (None, 32, 32, 32) 6144 ['block_4_depthwise_relu[0][0]']
block_4_project_BN (BatchNorma (None, 32, 32, 32) 128 ['block_4_project[0][0]']
lization)
block_4_add (Add) (None, 32, 32, 32) 0 ['block_3_project_BN[0][0]',
'block_4_project_BN[0][0]']
block_5_expand (Conv2D) (None, 32, 32, 192) 6144 ['block_4_add[0][0]']
block_5_expand_BN (BatchNormal (None, 32, 32, 192) 768 ['block_5_expand[0][0]']
ization)
block_5_expand_relu (ReLU) (None, 32, 32, 192) 0 ['block_5_expand_BN[0][0]']
block_5_depthwise (DepthwiseCo (None, 32, 32, 192) 1728 ['block_5_expand_relu[0][0]']
nv2D)
block_5_depthwise_BN (BatchNor (None, 32, 32, 192) 768 ['block_5_depthwise[0][0]']
malization)
block_5_depthwise_relu (ReLU) (None, 32, 32, 192) 0 ['block_5_depthwise_BN[0][0]']
block_5_project (Conv2D) (None, 32, 32, 32) 6144 ['block_5_depthwise_relu[0][0]']
block_5_project_BN (BatchNorma (None, 32, 32, 32) 128 ['block_5_project[0][0]']
lization)
block_5_add (Add) (None, 32, 32, 32) 0 ['block_4_add[0][0]',
'block_5_project_BN[0][0]']
block_6_expand (Conv2D) (None, 32, 32, 192) 6144 ['block_5_add[0][0]']
block_6_expand_BN (BatchNormal (None, 32, 32, 192) 768 ['block_6_expand[0][0]']
ization)
block_6_expand_relu (ReLU) (None, 32, 32, 192) 0 ['block_6_expand_BN[0][0]']
block_6_pad (ZeroPadding2D) (None, 33, 33, 192) 0 ['block_6_expand_relu[0][0]']
block_6_depthwise (DepthwiseCo (None, 16, 16, 192) 1728 ['block_6_pad[0][0]']
nv2D)
block_6_depthwise_BN (BatchNor (None, 16, 16, 192) 768 ['block_6_depthwise[0][0]']
malization)
block_6_depthwise_relu (ReLU) (None, 16, 16, 192) 0 ['block_6_depthwise_BN[0][0]']
block_6_project (Conv2D) (None, 16, 16, 64) 12288 ['block_6_depthwise_relu[0][0]']
block_6_project_BN (BatchNorma (None, 16, 16, 64) 256 ['block_6_project[0][0]']
lization)
block_7_expand (Conv2D) (None, 16, 16, 384) 24576 ['block_6_project_BN[0][0]']
block_7_expand_BN (BatchNormal (None, 16, 16, 384) 1536 ['block_7_expand[0][0]']
ization)
block_7_expand_relu (ReLU) (None, 16, 16, 384) 0 ['block_7_expand_BN[0][0]']
block_7_depthwise (DepthwiseCo (None, 16, 16, 384) 3456 ['block_7_expand_relu[0][0]']
nv2D)
block_7_depthwise_BN (BatchNor (None, 16, 16, 384) 1536 ['block_7_depthwise[0][0]']
malization)
block_7_depthwise_relu (ReLU) (None, 16, 16, 384) 0 ['block_7_depthwise_BN[0][0]']
block_7_project (Conv2D) (None, 16, 16, 64) 24576 ['block_7_depthwise_relu[0][0]']
block_7_project_BN (BatchNorma (None, 16, 16, 64) 256 ['block_7_project[0][0]']
lization)
block_7_add (Add) (None, 16, 16, 64) 0 ['block_6_project_BN[0][0]',
'block_7_project_BN[0][0]']
block_8_expand (Conv2D) (None, 16, 16, 384) 24576 ['block_7_add[0][0]']
block_8_expand_BN (BatchNormal (None, 16, 16, 384) 1536 ['block_8_expand[0][0]']
ization)
block_8_expand_relu (ReLU) (None, 16, 16, 384) 0 ['block_8_expand_BN[0][0]']
block_8_depthwise (DepthwiseCo (None, 16, 16, 384) 3456 ['block_8_expand_relu[0][0]']
nv2D)
block_8_depthwise_BN (BatchNor (None, 16, 16, 384) 1536 ['block_8_depthwise[0][0]']
malization)
block_8_depthwise_relu (ReLU) (None, 16, 16, 384) 0 ['block_8_depthwise_BN[0][0]']
block_8_project (Conv2D) (None, 16, 16, 64) 24576 ['block_8_depthwise_relu[0][0]']
block_8_project_BN (BatchNorma (None, 16, 16, 64) 256 ['block_8_project[0][0]']
lization)
block_8_add (Add) (None, 16, 16, 64) 0 ['block_7_add[0][0]',
'block_8_project_BN[0][0]']
block_9_expand (Conv2D) (None, 16, 16, 384) 24576 ['block_8_add[0][0]']
block_9_expand_BN (BatchNormal (None, 16, 16, 384) 1536 ['block_9_expand[0][0]']
ization)
block_9_expand_relu (ReLU) (None, 16, 16, 384) 0 ['block_9_expand_BN[0][0]']
block_9_depthwise (DepthwiseCo (None, 16, 16, 384) 3456 ['block_9_expand_relu[0][0]']
nv2D)
block_9_depthwise_BN (BatchNor (None, 16, 16, 384) 1536 ['block_9_depthwise[0][0]']
malization)
block_9_depthwise_relu (ReLU) (None, 16, 16, 384) 0 ['block_9_depthwise_BN[0][0]']
block_9_project (Conv2D) (None, 16, 16, 64) 24576 ['block_9_depthwise_relu[0][0]']
block_9_project_BN (BatchNorma (None, 16, 16, 64) 256 ['block_9_project[0][0]']
lization)
block_9_add (Add) (None, 16, 16, 64) 0 ['block_8_add[0][0]',
'block_9_project_BN[0][0]']
block_10_expand (Conv2D) (None, 16, 16, 384) 24576 ['block_9_add[0][0]']
block_10_expand_BN (BatchNorma (None, 16, 16, 384) 1536 ['block_10_expand[0][0]']
lization)
block_10_expand_relu (ReLU) (None, 16, 16, 384) 0 ['block_10_expand_BN[0][0]']
block_10_depthwise (DepthwiseC (None, 16, 16, 384) 3456 ['block_10_expand_relu[0][0]']
onv2D)
block_10_depthwise_BN (BatchNo (None, 16, 16, 384) 1536 ['block_10_depthwise[0][0]']
rmalization)
block_10_depthwise_relu (ReLU) (None, 16, 16, 384) 0 ['block_10_depthwise_BN[0][0]']
block_10_project (Conv2D) (None, 16, 16, 96) 36864 ['block_10_depthwise_relu[0][0]']
block_10_project_BN (BatchNorm (None, 16, 16, 96) 384 ['block_10_project[0][0]']
alization)
block_11_expand (Conv2D) (None, 16, 16, 576) 55296 ['block_10_project_BN[0][0]']
block_11_expand_BN (BatchNorma (None, 16, 16, 576) 2304 ['block_11_expand[0][0]']
lization)
block_11_expand_relu (ReLU) (None, 16, 16, 576) 0 ['block_11_expand_BN[0][0]']
block_11_depthwise (DepthwiseC (None, 16, 16, 576) 5184 ['block_11_expand_relu[0][0]']
onv2D)
block_11_depthwise_BN (BatchNo (None, 16, 16, 576) 2304 ['block_11_depthwise[0][0]']
rmalization)
block_11_depthwise_relu (ReLU) (None, 16, 16, 576) 0 ['block_11_depthwise_BN[0][0]']
block_11_project (Conv2D) (None, 16, 16, 96) 55296 ['block_11_depthwise_relu[0][0]']
block_11_project_BN (BatchNorm (None, 16, 16, 96) 384 ['block_11_project[0][0]']
alization)
block_11_add (Add) (None, 16, 16, 96) 0 ['block_10_project_BN[0][0]',
'block_11_project_BN[0][0]']
block_12_expand (Conv2D) (None, 16, 16, 576) 55296 ['block_11_add[0][0]']
block_12_expand_BN (BatchNorma (None, 16, 16, 576) 2304 ['block_12_expand[0][0]']
lization)
block_12_expand_relu (ReLU) (None, 16, 16, 576) 0 ['block_12_expand_BN[0][0]']
block_12_depthwise (DepthwiseC (None, 16, 16, 576) 5184 ['block_12_expand_relu[0][0]']
onv2D)
block_12_depthwise_BN (BatchNo (None, 16, 16, 576) 2304 ['block_12_depthwise[0][0]']
rmalization)
block_12_depthwise_relu (ReLU) (None, 16, 16, 576) 0 ['block_12_depthwise_BN[0][0]']
block_12_project (Conv2D) (None, 16, 16, 96) 55296 ['block_12_depthwise_relu[0][0]']
block_12_project_BN (BatchNorm (None, 16, 16, 96) 384 ['block_12_project[0][0]']
alization)
block_12_add (Add) (None, 16, 16, 96) 0 ['block_11_add[0][0]',
'block_12_project_BN[0][0]']
block_13_expand (Conv2D) (None, 16, 16, 576) 55296 ['block_12_add[0][0]']
block_13_expand_BN (BatchNorma (None, 16, 16, 576) 2304 ['block_13_expand[0][0]']
lization)
block_13_expand_relu (ReLU) (None, 16, 16, 576) 0 ['block_13_expand_BN[0][0]']
block_13_pad (ZeroPadding2D) (None, 17, 17, 576) 0 ['block_13_expand_relu[0][0]']
block_13_depthwise (DepthwiseC (None, 8, 8, 576) 5184 ['block_13_pad[0][0]']
onv2D)
block_13_depthwise_BN (BatchNo (None, 8, 8, 576) 2304 ['block_13_depthwise[0][0]']
rmalization)
block_13_depthwise_relu (ReLU) (None, 8, 8, 576) 0 ['block_13_depthwise_BN[0][0]']
block_13_project (Conv2D) (None, 8, 8, 160) 92160 ['block_13_depthwise_relu[0][0]']
block_13_project_BN (BatchNorm (None, 8, 8, 160) 640 ['block_13_project[0][0]']
alization)
block_14_expand (Conv2D) (None, 8, 8, 960) 153600 ['block_13_project_BN[0][0]']
block_14_expand_BN (BatchNorma (None, 8, 8, 960) 3840 ['block_14_expand[0][0]']
lization)
block_14_expand_relu (ReLU) (None, 8, 8, 960) 0 ['block_14_expand_BN[0][0]']
block_14_depthwise (DepthwiseC (None, 8, 8, 960) 8640 ['block_14_expand_relu[0][0]']
onv2D)
block_14_depthwise_BN (BatchNo (None, 8, 8, 960) 3840 ['block_14_depthwise[0][0]']
rmalization)
block_14_depthwise_relu (ReLU) (None, 8, 8, 960) 0 ['block_14_depthwise_BN[0][0]']
block_14_project (Conv2D) (None, 8, 8, 160) 153600 ['block_14_depthwise_relu[0][0]']
block_14_project_BN (BatchNorm (None, 8, 8, 160) 640 ['block_14_project[0][0]']
alization)
block_14_add (Add) (None, 8, 8, 160) 0 ['block_13_project_BN[0][0]',
'block_14_project_BN[0][0]']
block_15_expand (Conv2D) (None, 8, 8, 960) 153600 ['block_14_add[0][0]']
block_15_expand_BN (BatchNorma (None, 8, 8, 960) 3840 ['block_15_expand[0][0]']
lization)
block_15_expand_relu (ReLU) (None, 8, 8, 960) 0 ['block_15_expand_BN[0][0]']
block_15_depthwise (DepthwiseC (None, 8, 8, 960) 8640 ['block_15_expand_relu[0][0]']
onv2D)
block_15_depthwise_BN (BatchNo (None, 8, 8, 960) 3840 ['block_15_depthwise[0][0]']
rmalization)
block_15_depthwise_relu (ReLU) (None, 8, 8, 960) 0 ['block_15_depthwise_BN[0][0]']
block_15_project (Conv2D) (None, 8, 8, 160) 153600 ['block_15_depthwise_relu[0][0]']
block_15_project_BN (BatchNorm (None, 8, 8, 160) 640 ['block_15_project[0][0]']
alization)
block_15_add (Add) (None, 8, 8, 160) 0 ['block_14_add[0][0]',
'block_15_project_BN[0][0]']
block_16_expand (Conv2D) (None, 8, 8, 960) 153600 ['block_15_add[0][0]']
block_16_expand_BN (BatchNorma (None, 8, 8, 960) 3840 ['block_16_expand[0][0]']
lization)
block_16_expand_relu (ReLU) (None, 8, 8, 960) 0 ['block_16_expand_BN[0][0]']
block_16_depthwise (DepthwiseC (None, 8, 8, 960) 8640 ['block_16_expand_relu[0][0]']
onv2D)
block_16_depthwise_BN (BatchNo (None, 8, 8, 960) 3840 ['block_16_depthwise[0][0]']
rmalization)
block_16_depthwise_relu (ReLU) (None, 8, 8, 960) 0 ['block_16_depthwise_BN[0][0]']
block_16_project (Conv2D) (None, 8, 8, 320) 307200 ['block_16_depthwise_relu[0][0]']
block_16_project_BN (BatchNorm (None, 8, 8, 320) 1280 ['block_16_project[0][0]']
alization)
Conv_1 (Conv2D) (None, 8, 8, 1280) 409600 ['block_16_project_BN[0][0]']
Conv_1_bn (BatchNormalization) (None, 8, 8, 1280) 5120 ['Conv_1[0][0]']
out_relu (ReLU) (None, 8, 8, 1280) 0 ['Conv_1_bn[0][0]']
flatten_2 (Flatten) (None, 81920) 0 ['out_relu[0][0]']
dense_6 (Dense) (None, 6) 491526 ['flatten_2[0][0]']
==================================================================================================
Total params: 2,749,510
Trainable params: 2,715,398
Non-trainable params: 34,112
__________________________________________________________________________________________________
# Train the MobileNet-based model4 for 50 epochs.
# NOTE(review): the test set is passed as validation_data here — confirm a
# separate validation split isn't intended (valid_dir exists but is unused).
hist4 = model4.fit(
    train_set,
    validation_data=test_set,
    steps_per_epoch=len(train_set),
    validation_steps=len(test_set),
    epochs=50,
)
Epoch 1/50 850/850 [==============================] - 45s 50ms/step - loss: 9.2999 - accuracy: 0.6327 - f1_m: 0.6300 - precision_m: 0.6359 - recall_m: 0.6271 - val_loss: 2.2661 - val_accuracy: 0.1797 - val_f1_m: 0.1802 - val_precision_m: 0.2151 - val_recall_m: 0.1627 Epoch 2/50 850/850 [==============================] - 49s 58ms/step - loss: 0.4325 - accuracy: 0.8952 - f1_m: 0.8935 - precision_m: 0.8971 - recall_m: 0.8918 - val_loss: 3.3847 - val_accuracy: 0.1590 - val_f1_m: 0.1222 - val_precision_m: 0.1288 - val_recall_m: 0.1189 Epoch 3/50 850/850 [==============================] - 42s 49ms/step - loss: 0.2073 - accuracy: 0.9294 - f1_m: 0.9286 - precision_m: 0.9294 - recall_m: 0.9282 - val_loss: 2.4920 - val_accuracy: 0.1689 - val_f1_m: 0.1695 - val_precision_m: 0.1708 - val_recall_m: 0.1689 Epoch 4/50 850/850 [==============================] - 43s 51ms/step - loss: 0.1340 - accuracy: 0.9482 - f1_m: 0.9476 - precision_m: 0.9488 - recall_m: 0.9471 - val_loss: 1.7359 - val_accuracy: 0.2797 - val_f1_m: 0.2522 - val_precision_m: 0.2783 - val_recall_m: 0.2392 Epoch 5/50 850/850 [==============================] - 42s 49ms/step - loss: 0.1326 - accuracy: 0.9411 - f1_m: 0.9400 - precision_m: 0.9412 - recall_m: 0.9394 - val_loss: 0.5979 - val_accuracy: 0.7311 - val_f1_m: 0.6546 - val_precision_m: 0.7637 - val_recall_m: 0.6000 Epoch 6/50 850/850 [==============================] - 42s 49ms/step - loss: 0.1128 - accuracy: 0.9564 - f1_m: 0.9559 - precision_m: 0.9571 - recall_m: 0.9553 - val_loss: 0.3801 - val_accuracy: 0.8071 - val_f1_m: 0.7980 - val_precision_m: 0.8250 - val_recall_m: 0.7844 Epoch 7/50 850/850 [==============================] - 43s 51ms/step - loss: 0.1235 - accuracy: 0.9464 - f1_m: 0.9459 - precision_m: 0.9471 - recall_m: 0.9453 - val_loss: 0.2377 - val_accuracy: 0.8991 - val_f1_m: 0.8958 - val_precision_m: 0.9080 - val_recall_m: 0.8896 Epoch 8/50 850/850 [==============================] - 42s 50ms/step - loss: 0.1113 - accuracy: 0.9541 - f1_m: 0.9535 - 
precision_m: 0.9547 - recall_m: 0.9529 - val_loss: 0.1501 - val_accuracy: 0.9410 - val_f1_m: 0.9412 - val_precision_m: 0.9425 - val_recall_m: 0.9406 Epoch 9/50 850/850 [==============================] - 46s 54ms/step - loss: 0.1055 - accuracy: 0.9553 - f1_m: 0.9553 - precision_m: 0.9553 - recall_m: 0.9553 - val_loss: 0.1498 - val_accuracy: 0.9401 - val_f1_m: 0.9393 - val_precision_m: 0.9406 - val_recall_m: 0.9387 Epoch 10/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0957 - accuracy: 0.9594 - f1_m: 0.9590 - precision_m: 0.9594 - recall_m: 0.9588 - val_loss: 0.1597 - val_accuracy: 0.9401 - val_f1_m: 0.9385 - val_precision_m: 0.9401 - val_recall_m: 0.9377 Epoch 11/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0977 - accuracy: 0.9529 - f1_m: 0.9535 - precision_m: 0.9547 - recall_m: 0.9529 - val_loss: 0.1838 - val_accuracy: 0.9142 - val_f1_m: 0.9135 - val_precision_m: 0.9245 - val_recall_m: 0.9080 Epoch 12/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0949 - accuracy: 0.9547 - f1_m: 0.9549 - precision_m: 0.9553 - recall_m: 0.9547 - val_loss: 0.1513 - val_accuracy: 0.9217 - val_f1_m: 0.9189 - val_precision_m: 0.9274 - val_recall_m: 0.9146 Epoch 13/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0942 - accuracy: 0.9517 - f1_m: 0.9522 - precision_m: 0.9529 - recall_m: 0.9518 - val_loss: 0.2069 - val_accuracy: 0.8995 - val_f1_m: 0.8995 - val_precision_m: 0.9014 - val_recall_m: 0.8986 Epoch 14/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0984 - accuracy: 0.9570 - f1_m: 0.9567 - precision_m: 0.9571 - recall_m: 0.9565 - val_loss: 0.1738 - val_accuracy: 0.9127 - val_f1_m: 0.9093 - val_precision_m: 0.9278 - val_recall_m: 0.9000 Epoch 15/50 850/850 [==============================] - 44s 52ms/step - loss: 0.0906 - accuracy: 0.9600 - f1_m: 0.9600 - precision_m: 0.9600 - recall_m: 0.9600 - val_loss: 0.1648 - val_accuracy: 0.9160 - val_f1_m: 0.9160 - 
val_precision_m: 0.9208 - val_recall_m: 0.9137 Epoch 16/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0912 - accuracy: 0.9594 - f1_m: 0.9594 - precision_m: 0.9594 - recall_m: 0.9594 - val_loss: 0.1625 - val_accuracy: 0.9406 - val_f1_m: 0.9399 - val_precision_m: 0.9406 - val_recall_m: 0.9396 Epoch 17/50 850/850 [==============================] - 44s 52ms/step - loss: 0.0855 - accuracy: 0.9576 - f1_m: 0.9576 - precision_m: 0.9576 - recall_m: 0.9576 - val_loss: 0.1639 - val_accuracy: 0.9335 - val_f1_m: 0.9299 - val_precision_m: 0.9415 - val_recall_m: 0.9241 Epoch 18/50 850/850 [==============================] - 52s 61ms/step - loss: 0.0876 - accuracy: 0.9547 - f1_m: 0.9547 - precision_m: 0.9547 - recall_m: 0.9547 - val_loss: 0.1757 - val_accuracy: 0.9283 - val_f1_m: 0.9277 - val_precision_m: 0.9349 - val_recall_m: 0.9241 Epoch 19/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0925 - accuracy: 0.9588 - f1_m: 0.9584 - precision_m: 0.9588 - recall_m: 0.9582 - val_loss: 0.1748 - val_accuracy: 0.9358 - val_f1_m: 0.9349 - val_precision_m: 0.9396 - val_recall_m: 0.9325 Epoch 20/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0883 - accuracy: 0.9600 - f1_m: 0.9598 - precision_m: 0.9606 - recall_m: 0.9594 - val_loss: 0.1806 - val_accuracy: 0.9297 - val_f1_m: 0.9286 - val_precision_m: 0.9330 - val_recall_m: 0.9264 Epoch 21/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0859 - accuracy: 0.9600 - f1_m: 0.9596 - precision_m: 0.9600 - recall_m: 0.9594 - val_loss: 0.2350 - val_accuracy: 0.8925 - val_f1_m: 0.8926 - val_precision_m: 0.9108 - val_recall_m: 0.8835 Epoch 22/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0849 - accuracy: 0.9612 - f1_m: 0.9612 - precision_m: 0.9612 - recall_m: 0.9612 - val_loss: 0.1835 - val_accuracy: 0.9231 - val_f1_m: 0.9184 - val_precision_m: 0.9278 - val_recall_m: 0.9137 Epoch 23/50 850/850 [==============================] - 43s 51ms/step 
- loss: 0.0830 - accuracy: 0.9588 - f1_m: 0.9580 - precision_m: 0.9588 - recall_m: 0.9576 - val_loss: 0.2258 - val_accuracy: 0.9137 - val_f1_m: 0.9041 - val_precision_m: 0.9189 - val_recall_m: 0.8967 Epoch 24/50 850/850 [==============================] - 43s 51ms/step - loss: 0.0902 - accuracy: 0.9553 - f1_m: 0.9553 - precision_m: 0.9553 - recall_m: 0.9553 - val_loss: 0.2020 - val_accuracy: 0.9259 - val_f1_m: 0.9206 - val_precision_m: 0.9316 - val_recall_m: 0.9151 Epoch 25/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0811 - accuracy: 0.9629 - f1_m: 0.9631 - precision_m: 0.9635 - recall_m: 0.9629 - val_loss: 0.1652 - val_accuracy: 0.9203 - val_f1_m: 0.9203 - val_precision_m: 0.9250 - val_recall_m: 0.9179 Epoch 26/50 850/850 [==============================] - 43s 51ms/step - loss: 0.0780 - accuracy: 0.9582 - f1_m: 0.9584 - precision_m: 0.9588 - recall_m: 0.9582 - val_loss: 0.1495 - val_accuracy: 0.9335 - val_f1_m: 0.9319 - val_precision_m: 0.9429 - val_recall_m: 0.9264 Epoch 27/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0850 - accuracy: 0.9600 - f1_m: 0.9586 - precision_m: 0.9594 - recall_m: 0.9582 - val_loss: 0.1529 - val_accuracy: 0.9283 - val_f1_m: 0.9253 - val_precision_m: 0.9354 - val_recall_m: 0.9203 Epoch 28/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0856 - accuracy: 0.9588 - f1_m: 0.9588 - precision_m: 0.9588 - recall_m: 0.9588 - val_loss: 0.1570 - val_accuracy: 0.9392 - val_f1_m: 0.9371 - val_precision_m: 0.9406 - val_recall_m: 0.9354 Epoch 29/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0783 - accuracy: 0.9600 - f1_m: 0.9600 - precision_m: 0.9612 - recall_m: 0.9594 - val_loss: 0.1656 - val_accuracy: 0.9344 - val_f1_m: 0.9233 - val_precision_m: 0.9434 - val_recall_m: 0.9132 Epoch 30/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0805 - accuracy: 0.9564 - f1_m: 0.9565 - precision_m: 0.9565 - recall_m: 0.9565 - val_loss: 0.1760 - 
val_accuracy: 0.9311 - val_f1_m: 0.9286 - val_precision_m: 0.9415 - val_recall_m: 0.9222 Epoch 31/50 850/850 [==============================] - 43s 51ms/step - loss: 0.0839 - accuracy: 0.9588 - f1_m: 0.9590 - precision_m: 0.9594 - recall_m: 0.9588 - val_loss: 0.1990 - val_accuracy: 0.9099 - val_f1_m: 0.9096 - val_precision_m: 0.9118 - val_recall_m: 0.9085 Epoch 32/50 850/850 [==============================] - 48s 56ms/step - loss: 0.0748 - accuracy: 0.9676 - f1_m: 0.9673 - precision_m: 0.9676 - recall_m: 0.9671 - val_loss: 0.1868 - val_accuracy: 0.9113 - val_f1_m: 0.9072 - val_precision_m: 0.9226 - val_recall_m: 0.8995 Epoch 33/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0787 - accuracy: 0.9600 - f1_m: 0.9600 - precision_m: 0.9600 - recall_m: 0.9600 - val_loss: 0.1735 - val_accuracy: 0.9288 - val_f1_m: 0.9281 - val_precision_m: 0.9335 - val_recall_m: 0.9255 Epoch 34/50 850/850 [==============================] - 50s 59ms/step - loss: 0.0745 - accuracy: 0.9606 - f1_m: 0.9606 - precision_m: 0.9606 - recall_m: 0.9606 - val_loss: 0.1721 - val_accuracy: 0.9316 - val_f1_m: 0.9289 - val_precision_m: 0.9396 - val_recall_m: 0.9236 Epoch 35/50 850/850 [==============================] - 44s 52ms/step - loss: 0.0818 - accuracy: 0.9564 - f1_m: 0.9567 - precision_m: 0.9571 - recall_m: 0.9565 - val_loss: 0.1576 - val_accuracy: 0.9387 - val_f1_m: 0.9366 - val_precision_m: 0.9410 - val_recall_m: 0.9344 Epoch 36/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0735 - accuracy: 0.9617 - f1_m: 0.9618 - precision_m: 0.9618 - recall_m: 0.9618 - val_loss: 0.1699 - val_accuracy: 0.9401 - val_f1_m: 0.9381 - val_precision_m: 0.9425 - val_recall_m: 0.9358 Epoch 37/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0749 - accuracy: 0.9635 - f1_m: 0.9631 - precision_m: 0.9635 - recall_m: 0.9629 - val_loss: 0.1701 - val_accuracy: 0.9330 - val_f1_m: 0.9319 - val_precision_m: 0.9373 - val_recall_m: 0.9292 Epoch 38/50 850/850 
[==============================] - 42s 50ms/step - loss: 0.0682 - accuracy: 0.9670 - f1_m: 0.9671 - precision_m: 0.9671 - recall_m: 0.9671 - val_loss: 0.1639 - val_accuracy: 0.9420 - val_f1_m: 0.9409 - val_precision_m: 0.9434 - val_recall_m: 0.9396 Epoch 39/50 850/850 [==============================] - 44s 52ms/step - loss: 0.0762 - accuracy: 0.9612 - f1_m: 0.9608 - precision_m: 0.9612 - recall_m: 0.9606 - val_loss: 0.2100 - val_accuracy: 0.9057 - val_f1_m: 0.9036 - val_precision_m: 0.9118 - val_recall_m: 0.8995 Epoch 40/50 850/850 [==============================] - 44s 52ms/step - loss: 0.0742 - accuracy: 0.9606 - f1_m: 0.9602 - precision_m: 0.9606 - recall_m: 0.9600 - val_loss: 0.1787 - val_accuracy: 0.9255 - val_f1_m: 0.9222 - val_precision_m: 0.9448 - val_recall_m: 0.9108 Epoch 41/50 850/850 [==============================] - 43s 51ms/step - loss: 0.0703 - accuracy: 0.9694 - f1_m: 0.9688 - precision_m: 0.9700 - recall_m: 0.9682 - val_loss: 0.2398 - val_accuracy: 0.8925 - val_f1_m: 0.8912 - val_precision_m: 0.8981 - val_recall_m: 0.8877 Epoch 42/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0711 - accuracy: 0.9623 - f1_m: 0.9622 - precision_m: 0.9629 - recall_m: 0.9618 - val_loss: 0.1611 - val_accuracy: 0.9349 - val_f1_m: 0.9332 - val_precision_m: 0.9392 - val_recall_m: 0.9302 Epoch 43/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0703 - accuracy: 0.9653 - f1_m: 0.9649 - precision_m: 0.9653 - recall_m: 0.9647 - val_loss: 0.1741 - val_accuracy: 0.9278 - val_f1_m: 0.9264 - val_precision_m: 0.9302 - val_recall_m: 0.9245 Epoch 44/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0727 - accuracy: 0.9635 - f1_m: 0.9629 - precision_m: 0.9641 - recall_m: 0.9624 - val_loss: 0.1768 - val_accuracy: 0.9349 - val_f1_m: 0.9333 - val_precision_m: 0.9387 - val_recall_m: 0.9307 Epoch 45/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0780 - accuracy: 0.9617 - f1_m: 0.9620 - precision_m: 
0.9624 - recall_m: 0.9618 - val_loss: 0.1806 - val_accuracy: 0.9377 - val_f1_m: 0.9379 - val_precision_m: 0.9401 - val_recall_m: 0.9368 Epoch 46/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0707 - accuracy: 0.9629 - f1_m: 0.9629 - precision_m: 0.9629 - recall_m: 0.9629 - val_loss: 0.2666 - val_accuracy: 0.8854 - val_f1_m: 0.8789 - val_precision_m: 0.8962 - val_recall_m: 0.8703 Epoch 47/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0707 - accuracy: 0.9623 - f1_m: 0.9624 - precision_m: 0.9624 - recall_m: 0.9624 - val_loss: 0.1760 - val_accuracy: 0.9387 - val_f1_m: 0.9390 - val_precision_m: 0.9406 - val_recall_m: 0.9382 Epoch 48/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0621 - accuracy: 0.9682 - f1_m: 0.9682 - precision_m: 0.9682 - recall_m: 0.9682 - val_loss: 0.1568 - val_accuracy: 0.9406 - val_f1_m: 0.9352 - val_precision_m: 0.9443 - val_recall_m: 0.9307 Epoch 49/50 850/850 [==============================] - 43s 51ms/step - loss: 0.0664 - accuracy: 0.9694 - f1_m: 0.9690 - precision_m: 0.9694 - recall_m: 0.9688 - val_loss: 0.1667 - val_accuracy: 0.9382 - val_f1_m: 0.9376 - val_precision_m: 0.9401 - val_recall_m: 0.9363 Epoch 50/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0705 - accuracy: 0.9629 - f1_m: 0.9629 - precision_m: 0.9629 - recall_m: 0.9629 - val_loss: 0.1555 - val_accuracy: 0.9410 - val_f1_m: 0.9395 - val_precision_m: 0.9439 - val_recall_m: 0.9373
# Record MobileNet's final-epoch validation metrics.
# FIX: index with -1 (last epoch) instead of the hard-coded 49 so this
# stays correct if the epoch count changes or training is stopped early;
# for the 50-epoch run above the two indices are identical.
dl_acc = hist4.history["val_accuracy"][-1]
dl_prec = hist4.history["val_precision_m"][-1]
dl_rec = hist4.history["val_recall_m"][-1]
# storeResults is defined earlier in the notebook; it appends one row of
# (model name, accuracy, precision, recall, f1) to the comparison table.
storeResults('MobileNet', dl_acc, dl_prec, dl_rec, dl_f1 := hist4.history["val_f1_m"][-1])
# Plot model4's training curves side by side: loss (left) and accuracy (right).
x = hist4
history = x.history

plt.figure(figsize=(20, 10))
plt.suptitle('Optimizer : adam', fontsize=10)

# Left panel: training vs. validation loss per epoch.
plt.subplot(1, 2, 1)
plt.ylabel('Loss', fontsize=16)
plt.plot(history['loss'], label='Training Loss')
plt.plot(history['val_loss'], label='Validation Loss')
plt.legend(loc='upper right')

# Right panel: training vs. validation accuracy per epoch.
plt.subplot(1, 2, 2)
plt.ylabel('Accuracy', fontsize=16)
plt.plot(history['accuracy'], label='Training Accuracy')
plt.plot(history['val_accuracy'], label='Validation Accuracy')
plt.legend(loc='lower right')

plt.show()
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
# FIX: Xception is never imported anywhere else in this file, so this cell
# raised NameError; bring it in from keras.applications.
from tensorflow.keras.applications import Xception

# Pretrained Xception backbone (ImageNet weights), classifier head removed,
# taking the same 256x256 RGB inputs used throughout this notebook.
base = Xception(include_top=False, weights='imagenet', input_shape=(256, 256, 3))

# Head where the prediction is conducted: global average pooling over the
# backbone's feature map, then a 6-way softmax (one unit per sub-class).
x = base.output
x = GlobalAveragePooling2D()(x)
head = Dense(6, activation='softmax')(x)

# Combine base and head into the full end-to-end trainable model.
model5 = Model(inputs=base.input, outputs=head)
# f1_m / precision_m / recall_m are the custom metric functions defined
# earlier in the notebook (the same ones logged for model4 above).
model5.compile(optimizer='sgd',
               loss='categorical_crossentropy',
               metrics=["accuracy", f1_m, precision_m, recall_m])
model5.summary()
Model: "model_1"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_3 (InputLayer) [(None, 256, 256, 3 0 []
)]
block1_conv1 (Conv2D) (None, 127, 127, 32 864 ['input_3[0][0]']
)
block1_conv1_bn (BatchNormaliz (None, 127, 127, 32 128 ['block1_conv1[0][0]']
ation) )
block1_conv1_act (Activation) (None, 127, 127, 32 0 ['block1_conv1_bn[0][0]']
)
block1_conv2 (Conv2D) (None, 125, 125, 64 18432 ['block1_conv1_act[0][0]']
)
block1_conv2_bn (BatchNormaliz (None, 125, 125, 64 256 ['block1_conv2[0][0]']
ation) )
block1_conv2_act (Activation) (None, 125, 125, 64 0 ['block1_conv2_bn[0][0]']
)
block2_sepconv1 (SeparableConv (None, 125, 125, 12 8768 ['block1_conv2_act[0][0]']
2D) 8)
block2_sepconv1_bn (BatchNorma (None, 125, 125, 12 512 ['block2_sepconv1[0][0]']
lization) 8)
block2_sepconv2_act (Activatio (None, 125, 125, 12 0 ['block2_sepconv1_bn[0][0]']
n) 8)
block2_sepconv2 (SeparableConv (None, 125, 125, 12 17536 ['block2_sepconv2_act[0][0]']
2D) 8)
block2_sepconv2_bn (BatchNorma (None, 125, 125, 12 512 ['block2_sepconv2[0][0]']
lization) 8)
conv2d_8 (Conv2D) (None, 63, 63, 128) 8192 ['block1_conv2_act[0][0]']
block2_pool (MaxPooling2D) (None, 63, 63, 128) 0 ['block2_sepconv2_bn[0][0]']
batch_normalization_4 (BatchNo (None, 63, 63, 128) 512 ['conv2d_8[0][0]']
rmalization)
add_12 (Add) (None, 63, 63, 128) 0 ['block2_pool[0][0]',
'batch_normalization_4[0][0]']
block3_sepconv1_act (Activatio (None, 63, 63, 128) 0 ['add_12[0][0]']
n)
block3_sepconv1 (SeparableConv (None, 63, 63, 256) 33920 ['block3_sepconv1_act[0][0]']
2D)
block3_sepconv1_bn (BatchNorma (None, 63, 63, 256) 1024 ['block3_sepconv1[0][0]']
lization)
block3_sepconv2_act (Activatio (None, 63, 63, 256) 0 ['block3_sepconv1_bn[0][0]']
n)
block3_sepconv2 (SeparableConv (None, 63, 63, 256) 67840 ['block3_sepconv2_act[0][0]']
2D)
block3_sepconv2_bn (BatchNorma (None, 63, 63, 256) 1024 ['block3_sepconv2[0][0]']
lization)
conv2d_9 (Conv2D) (None, 32, 32, 256) 32768 ['add_12[0][0]']
block3_pool (MaxPooling2D) (None, 32, 32, 256) 0 ['block3_sepconv2_bn[0][0]']
batch_normalization_5 (BatchNo (None, 32, 32, 256) 1024 ['conv2d_9[0][0]']
rmalization)
add_13 (Add) (None, 32, 32, 256) 0 ['block3_pool[0][0]',
'batch_normalization_5[0][0]']
block4_sepconv1_act (Activatio (None, 32, 32, 256) 0 ['add_13[0][0]']
n)
block4_sepconv1 (SeparableConv (None, 32, 32, 728) 188672 ['block4_sepconv1_act[0][0]']
2D)
block4_sepconv1_bn (BatchNorma (None, 32, 32, 728) 2912 ['block4_sepconv1[0][0]']
lization)
block4_sepconv2_act (Activatio (None, 32, 32, 728) 0 ['block4_sepconv1_bn[0][0]']
n)
block4_sepconv2 (SeparableConv (None, 32, 32, 728) 536536 ['block4_sepconv2_act[0][0]']
2D)
block4_sepconv2_bn (BatchNorma (None, 32, 32, 728) 2912 ['block4_sepconv2[0][0]']
lization)
conv2d_10 (Conv2D) (None, 16, 16, 728) 186368 ['add_13[0][0]']
block4_pool (MaxPooling2D) (None, 16, 16, 728) 0 ['block4_sepconv2_bn[0][0]']
batch_normalization_6 (BatchNo (None, 16, 16, 728) 2912 ['conv2d_10[0][0]']
rmalization)
add_14 (Add) (None, 16, 16, 728) 0 ['block4_pool[0][0]',
'batch_normalization_6[0][0]']
block5_sepconv1_act (Activatio (None, 16, 16, 728) 0 ['add_14[0][0]']
n)
block5_sepconv1 (SeparableConv (None, 16, 16, 728) 536536 ['block5_sepconv1_act[0][0]']
2D)
block5_sepconv1_bn (BatchNorma (None, 16, 16, 728) 2912 ['block5_sepconv1[0][0]']
lization)
block5_sepconv2_act (Activatio (None, 16, 16, 728) 0 ['block5_sepconv1_bn[0][0]']
n)
block5_sepconv2 (SeparableConv (None, 16, 16, 728) 536536 ['block5_sepconv2_act[0][0]']
2D)
block5_sepconv2_bn (BatchNorma (None, 16, 16, 728) 2912 ['block5_sepconv2[0][0]']
lization)
block5_sepconv3_act (Activatio (None, 16, 16, 728) 0 ['block5_sepconv2_bn[0][0]']
n)
block5_sepconv3 (SeparableConv (None, 16, 16, 728) 536536 ['block5_sepconv3_act[0][0]']
2D)
block5_sepconv3_bn (BatchNorma (None, 16, 16, 728) 2912 ['block5_sepconv3[0][0]']
lization)
add_15 (Add) (None, 16, 16, 728) 0 ['block5_sepconv3_bn[0][0]',
'add_14[0][0]']
block6_sepconv1_act (Activatio (None, 16, 16, 728) 0 ['add_15[0][0]']
n)
block6_sepconv1 (SeparableConv (None, 16, 16, 728) 536536 ['block6_sepconv1_act[0][0]']
2D)
block6_sepconv1_bn (BatchNorma (None, 16, 16, 728) 2912 ['block6_sepconv1[0][0]']
lization)
block6_sepconv2_act (Activatio (None, 16, 16, 728) 0 ['block6_sepconv1_bn[0][0]']
n)
block6_sepconv2 (SeparableConv (None, 16, 16, 728) 536536 ['block6_sepconv2_act[0][0]']
2D)
block6_sepconv2_bn (BatchNorma (None, 16, 16, 728) 2912 ['block6_sepconv2[0][0]']
lization)
block6_sepconv3_act (Activatio (None, 16, 16, 728) 0 ['block6_sepconv2_bn[0][0]']
n)
block6_sepconv3 (SeparableConv (None, 16, 16, 728) 536536 ['block6_sepconv3_act[0][0]']
2D)
block6_sepconv3_bn (BatchNorma (None, 16, 16, 728) 2912 ['block6_sepconv3[0][0]']
lization)
add_16 (Add) (None, 16, 16, 728) 0 ['block6_sepconv3_bn[0][0]',
'add_15[0][0]']
block7_sepconv1_act (Activatio (None, 16, 16, 728) 0 ['add_16[0][0]']
n)
block7_sepconv1 (SeparableConv (None, 16, 16, 728) 536536 ['block7_sepconv1_act[0][0]']
2D)
block7_sepconv1_bn (BatchNorma (None, 16, 16, 728) 2912 ['block7_sepconv1[0][0]']
lization)
block7_sepconv2_act (Activatio (None, 16, 16, 728) 0 ['block7_sepconv1_bn[0][0]']
n)
block7_sepconv2 (SeparableConv (None, 16, 16, 728) 536536 ['block7_sepconv2_act[0][0]']
2D)
block7_sepconv2_bn (BatchNorma (None, 16, 16, 728) 2912 ['block7_sepconv2[0][0]']
lization)
block7_sepconv3_act (Activatio (None, 16, 16, 728) 0 ['block7_sepconv2_bn[0][0]']
n)
block7_sepconv3 (SeparableConv (None, 16, 16, 728) 536536 ['block7_sepconv3_act[0][0]']
2D)
block7_sepconv3_bn (BatchNorma (None, 16, 16, 728) 2912 ['block7_sepconv3[0][0]']
lization)
add_17 (Add) (None, 16, 16, 728) 0 ['block7_sepconv3_bn[0][0]',
'add_16[0][0]']
block8_sepconv1_act (Activatio (None, 16, 16, 728) 0 ['add_17[0][0]']
n)
block8_sepconv1 (SeparableConv (None, 16, 16, 728) 536536 ['block8_sepconv1_act[0][0]']
2D)
block8_sepconv1_bn (BatchNorma (None, 16, 16, 728) 2912 ['block8_sepconv1[0][0]']
lization)
block8_sepconv2_act (Activatio (None, 16, 16, 728) 0 ['block8_sepconv1_bn[0][0]']
n)
block8_sepconv2 (SeparableConv (None, 16, 16, 728) 536536 ['block8_sepconv2_act[0][0]']
2D)
block8_sepconv2_bn (BatchNorma (None, 16, 16, 728) 2912 ['block8_sepconv2[0][0]']
lization)
block8_sepconv3_act (Activatio (None, 16, 16, 728) 0 ['block8_sepconv2_bn[0][0]']
n)
block8_sepconv3 (SeparableConv (None, 16, 16, 728) 536536 ['block8_sepconv3_act[0][0]']
2D)
block8_sepconv3_bn (BatchNorma (None, 16, 16, 728) 2912 ['block8_sepconv3[0][0]']
lization)
add_18 (Add) (None, 16, 16, 728) 0 ['block8_sepconv3_bn[0][0]',
'add_17[0][0]']
block9_sepconv1_act (Activatio (None, 16, 16, 728) 0 ['add_18[0][0]']
n)
block9_sepconv1 (SeparableConv (None, 16, 16, 728) 536536 ['block9_sepconv1_act[0][0]']
2D)
block9_sepconv1_bn (BatchNorma (None, 16, 16, 728) 2912 ['block9_sepconv1[0][0]']
lization)
block9_sepconv2_act (Activatio (None, 16, 16, 728) 0 ['block9_sepconv1_bn[0][0]']
n)
block9_sepconv2 (SeparableConv (None, 16, 16, 728) 536536 ['block9_sepconv2_act[0][0]']
2D)
block9_sepconv2_bn (BatchNorma (None, 16, 16, 728) 2912 ['block9_sepconv2[0][0]']
lization)
block9_sepconv3_act (Activatio (None, 16, 16, 728) 0 ['block9_sepconv2_bn[0][0]']
n)
block9_sepconv3 (SeparableConv (None, 16, 16, 728) 536536 ['block9_sepconv3_act[0][0]']
2D)
block9_sepconv3_bn (BatchNorma (None, 16, 16, 728) 2912 ['block9_sepconv3[0][0]']
lization)
add_19 (Add) (None, 16, 16, 728) 0 ['block9_sepconv3_bn[0][0]',
'add_18[0][0]']
block10_sepconv1_act (Activati (None, 16, 16, 728) 0 ['add_19[0][0]']
on)
block10_sepconv1 (SeparableCon (None, 16, 16, 728) 536536 ['block10_sepconv1_act[0][0]']
v2D)
block10_sepconv1_bn (BatchNorm (None, 16, 16, 728) 2912 ['block10_sepconv1[0][0]']
alization)
block10_sepconv2_act (Activati (None, 16, 16, 728) 0 ['block10_sepconv1_bn[0][0]']
on)
block10_sepconv2 (SeparableCon (None, 16, 16, 728) 536536 ['block10_sepconv2_act[0][0]']
v2D)
block10_sepconv2_bn (BatchNorm (None, 16, 16, 728) 2912 ['block10_sepconv2[0][0]']
alization)
block10_sepconv3_act (Activati (None, 16, 16, 728) 0 ['block10_sepconv2_bn[0][0]']
on)
block10_sepconv3 (SeparableCon (None, 16, 16, 728) 536536 ['block10_sepconv3_act[0][0]']
v2D)
block10_sepconv3_bn (BatchNorm (None, 16, 16, 728) 2912 ['block10_sepconv3[0][0]']
alization)
add_20 (Add) (None, 16, 16, 728) 0 ['block10_sepconv3_bn[0][0]',
'add_19[0][0]']
block11_sepconv1_act (Activati (None, 16, 16, 728) 0 ['add_20[0][0]']
on)
block11_sepconv1 (SeparableCon (None, 16, 16, 728) 536536 ['block11_sepconv1_act[0][0]']
v2D)
block11_sepconv1_bn (BatchNorm (None, 16, 16, 728) 2912 ['block11_sepconv1[0][0]']
alization)
block11_sepconv2_act (Activati (None, 16, 16, 728) 0 ['block11_sepconv1_bn[0][0]']
on)
block11_sepconv2 (SeparableCon (None, 16, 16, 728) 536536 ['block11_sepconv2_act[0][0]']
v2D)
block11_sepconv2_bn (BatchNorm (None, 16, 16, 728) 2912 ['block11_sepconv2[0][0]']
alization)
block11_sepconv3_act (Activati (None, 16, 16, 728) 0 ['block11_sepconv2_bn[0][0]']
on)
block11_sepconv3 (SeparableCon (None, 16, 16, 728) 536536 ['block11_sepconv3_act[0][0]']
v2D)
block11_sepconv3_bn (BatchNorm (None, 16, 16, 728) 2912 ['block11_sepconv3[0][0]']
alization)
add_21 (Add) (None, 16, 16, 728) 0 ['block11_sepconv3_bn[0][0]',
'add_20[0][0]']
block12_sepconv1_act (Activati (None, 16, 16, 728) 0 ['add_21[0][0]']
on)
block12_sepconv1 (SeparableCon (None, 16, 16, 728) 536536 ['block12_sepconv1_act[0][0]']
v2D)
block12_sepconv1_bn (BatchNorm (None, 16, 16, 728) 2912 ['block12_sepconv1[0][0]']
alization)
block12_sepconv2_act (Activati (None, 16, 16, 728) 0 ['block12_sepconv1_bn[0][0]']
on)
block12_sepconv2 (SeparableCon (None, 16, 16, 728) 536536 ['block12_sepconv2_act[0][0]']
v2D)
block12_sepconv2_bn (BatchNorm (None, 16, 16, 728) 2912 ['block12_sepconv2[0][0]']
alization)
block12_sepconv3_act (Activati (None, 16, 16, 728) 0 ['block12_sepconv2_bn[0][0]']
on)
block12_sepconv3 (SeparableCon (None, 16, 16, 728) 536536 ['block12_sepconv3_act[0][0]']
v2D)
block12_sepconv3_bn (BatchNorm (None, 16, 16, 728) 2912 ['block12_sepconv3[0][0]']
alization)
add_22 (Add) (None, 16, 16, 728) 0 ['block12_sepconv3_bn[0][0]',
'add_21[0][0]']
block13_sepconv1_act (Activati (None, 16, 16, 728) 0 ['add_22[0][0]']
on)
block13_sepconv1 (SeparableCon (None, 16, 16, 728) 536536 ['block13_sepconv1_act[0][0]']
v2D)
block13_sepconv1_bn (BatchNorm (None, 16, 16, 728) 2912 ['block13_sepconv1[0][0]']
alization)
block13_sepconv2_act (Activati (None, 16, 16, 728) 0 ['block13_sepconv1_bn[0][0]']
on)
block13_sepconv2 (SeparableCon (None, 16, 16, 1024 752024 ['block13_sepconv2_act[0][0]']
v2D) )
block13_sepconv2_bn (BatchNorm (None, 16, 16, 1024 4096 ['block13_sepconv2[0][0]']
alization) )
conv2d_11 (Conv2D) (None, 8, 8, 1024) 745472 ['add_22[0][0]']
block13_pool (MaxPooling2D) (None, 8, 8, 1024) 0 ['block13_sepconv2_bn[0][0]']
batch_normalization_7 (BatchNo (None, 8, 8, 1024) 4096 ['conv2d_11[0][0]']
rmalization)
add_23 (Add) (None, 8, 8, 1024) 0 ['block13_pool[0][0]',
'batch_normalization_7[0][0]']
block14_sepconv1 (SeparableCon (None, 8, 8, 1536) 1582080 ['add_23[0][0]']
v2D)
block14_sepconv1_bn (BatchNorm (None, 8, 8, 1536) 6144 ['block14_sepconv1[0][0]']
alization)
block14_sepconv1_act (Activati (None, 8, 8, 1536) 0 ['block14_sepconv1_bn[0][0]']
on)
block14_sepconv2 (SeparableCon (None, 8, 8, 2048) 3159552 ['block14_sepconv1_act[0][0]']
v2D)
block14_sepconv2_bn (BatchNorm (None, 8, 8, 2048) 8192 ['block14_sepconv2[0][0]']
alization)
block14_sepconv2_act (Activati (None, 8, 8, 2048) 0 ['block14_sepconv2_bn[0][0]']
on)
global_average_pooling2d (Glob (None, 2048) 0 ['block14_sepconv2_act[0][0]']
alAveragePooling2D)
dense_7 (Dense) (None, 6) 12294 ['global_average_pooling2d[0][0]'
]
==================================================================================================
Total params: 20,873,774
Trainable params: 20,819,246
Non-trainable params: 54,528
__________________________________________________________________________________________________
# Train the Xception-based model (model5) for 50 epochs, evaluating after
# every epoch on the held-out generator.
# NOTE(review): test_set is passed as validation_data, so the test split is
# also steering model selection — confirm a separate validation set was not
# intended (a valid_dir is created earlier in this file).
hist5 = model5.fit(
    train_set,
    validation_data=test_set,
    epochs=50,
    steps_per_epoch=len(train_set),
    validation_steps=len(test_set),
)
Epoch 1/50 850/850 [==============================] - 46s 50ms/step - loss: 0.6399 - accuracy: 0.7905 - f1_m: 0.6796 - precision_m: 0.7294 - recall_m: 0.6547 - val_loss: 0.1900 - val_accuracy: 0.9396 - val_f1_m: 0.9371 - val_precision_m: 0.9425 - val_recall_m: 0.9344 Epoch 2/50 850/850 [==============================] - 43s 50ms/step - loss: 0.1726 - accuracy: 0.9482 - f1_m: 0.9373 - precision_m: 0.9494 - recall_m: 0.9312 - val_loss: 0.1640 - val_accuracy: 0.9594 - val_f1_m: 0.9571 - val_precision_m: 0.9599 - val_recall_m: 0.9557 Epoch 3/50 850/850 [==============================] - 42s 50ms/step - loss: 0.1009 - accuracy: 0.9688 - f1_m: 0.9684 - precision_m: 0.9724 - recall_m: 0.9665 - val_loss: 0.1410 - val_accuracy: 0.9660 - val_f1_m: 0.9662 - val_precision_m: 0.9665 - val_recall_m: 0.9660 Epoch 4/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0692 - accuracy: 0.9788 - f1_m: 0.9786 - precision_m: 0.9794 - recall_m: 0.9782 - val_loss: 0.1316 - val_accuracy: 0.9684 - val_f1_m: 0.9686 - val_precision_m: 0.9689 - val_recall_m: 0.9684 Epoch 5/50 850/850 [==============================] - 43s 51ms/step - loss: 0.0571 - accuracy: 0.9812 - f1_m: 0.9812 - precision_m: 0.9812 - recall_m: 0.9812 - val_loss: 0.1692 - val_accuracy: 0.9675 - val_f1_m: 0.9676 - val_precision_m: 0.9679 - val_recall_m: 0.9675 Epoch 6/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0549 - accuracy: 0.9800 - f1_m: 0.9800 - precision_m: 0.9812 - recall_m: 0.9794 - val_loss: 0.1263 - val_accuracy: 0.9703 - val_f1_m: 0.9701 - val_precision_m: 0.9708 - val_recall_m: 0.9698 Epoch 7/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0475 - accuracy: 0.9835 - f1_m: 0.9833 - precision_m: 0.9841 - recall_m: 0.9829 - val_loss: 0.1369 - val_accuracy: 0.9736 - val_f1_m: 0.9736 - val_precision_m: 0.9736 - val_recall_m: 0.9736 Epoch 8/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0472 - accuracy: 0.9847 - f1_m: 0.9845 - 
precision_m: 0.9853 - recall_m: 0.9841 - val_loss: 0.1520 - val_accuracy: 0.9698 - val_f1_m: 0.9695 - val_precision_m: 0.9698 - val_recall_m: 0.9693 Epoch 9/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0473 - accuracy: 0.9818 - f1_m: 0.9820 - precision_m: 0.9824 - recall_m: 0.9818 - val_loss: 0.1505 - val_accuracy: 0.9708 - val_f1_m: 0.9709 - val_precision_m: 0.9712 - val_recall_m: 0.9708 Epoch 10/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0474 - accuracy: 0.9835 - f1_m: 0.9835 - precision_m: 0.9847 - recall_m: 0.9829 - val_loss: 0.1405 - val_accuracy: 0.9726 - val_f1_m: 0.9728 - val_precision_m: 0.9731 - val_recall_m: 0.9726 Epoch 11/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0417 - accuracy: 0.9853 - f1_m: 0.9847 - precision_m: 0.9859 - recall_m: 0.9841 - val_loss: 0.1273 - val_accuracy: 0.9736 - val_f1_m: 0.9736 - val_precision_m: 0.9736 - val_recall_m: 0.9736 Epoch 12/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0484 - accuracy: 0.9823 - f1_m: 0.9816 - precision_m: 0.9824 - recall_m: 0.9812 - val_loss: 0.1302 - val_accuracy: 0.9741 - val_f1_m: 0.9737 - val_precision_m: 0.9741 - val_recall_m: 0.9736 Epoch 13/50 850/850 [==============================] - 47s 56ms/step - loss: 0.0375 - accuracy: 0.9859 - f1_m: 0.9851 - precision_m: 0.9859 - recall_m: 0.9847 - val_loss: 0.1620 - val_accuracy: 0.9703 - val_f1_m: 0.9700 - val_precision_m: 0.9703 - val_recall_m: 0.9698 Epoch 14/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0336 - accuracy: 0.9841 - f1_m: 0.9843 - precision_m: 0.9847 - recall_m: 0.9841 - val_loss: 0.1779 - val_accuracy: 0.9722 - val_f1_m: 0.9722 - val_precision_m: 0.9722 - val_recall_m: 0.9722 Epoch 15/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0271 - accuracy: 0.9888 - f1_m: 0.9890 - precision_m: 0.9894 - recall_m: 0.9888 - val_loss: 0.1685 - val_accuracy: 0.9736 - val_f1_m: 0.9736 - 
val_precision_m: 0.9736 - val_recall_m: 0.9736 Epoch 16/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0333 - accuracy: 0.9847 - f1_m: 0.9843 - precision_m: 0.9847 - recall_m: 0.9841 - val_loss: 0.1143 - val_accuracy: 0.9722 - val_f1_m: 0.9725 - val_precision_m: 0.9741 - val_recall_m: 0.9717 Epoch 17/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0332 - accuracy: 0.9859 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 - val_loss: 0.1355 - val_accuracy: 0.9736 - val_f1_m: 0.9737 - val_precision_m: 0.9741 - val_recall_m: 0.9736 Epoch 18/50 850/850 [==============================] - 44s 52ms/step - loss: 0.0297 - accuracy: 0.9859 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 - val_loss: 0.1199 - val_accuracy: 0.9726 - val_f1_m: 0.9726 - val_precision_m: 0.9726 - val_recall_m: 0.9726 Epoch 19/50 850/850 [==============================] - 46s 54ms/step - loss: 0.0321 - accuracy: 0.9853 - f1_m: 0.9849 - precision_m: 0.9853 - recall_m: 0.9847 - val_loss: 0.1129 - val_accuracy: 0.9741 - val_f1_m: 0.9734 - val_precision_m: 0.9741 - val_recall_m: 0.9731 Epoch 20/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0247 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 0.1408 - val_accuracy: 0.9726 - val_f1_m: 0.9726 - val_precision_m: 0.9726 - val_recall_m: 0.9726 Epoch 21/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0295 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 - val_loss: 0.1659 - val_accuracy: 0.9736 - val_f1_m: 0.9733 - val_precision_m: 0.9736 - val_recall_m: 0.9731 Epoch 22/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0262 - accuracy: 0.9865 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 0.1772 - val_accuracy: 0.9731 - val_f1_m: 0.9731 - val_precision_m: 0.9731 - val_recall_m: 0.9731 Epoch 23/50 850/850 [==============================] - 42s 49ms/step 
- loss: 0.0250 - accuracy: 0.9865 - f1_m: 0.9865 - precision_m: 0.9865 - recall_m: 0.9865 - val_loss: 0.1275 - val_accuracy: 0.9745 - val_f1_m: 0.9745 - val_precision_m: 0.9745 - val_recall_m: 0.9745 Epoch 24/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0280 - accuracy: 0.9876 - f1_m: 0.9878 - precision_m: 0.9882 - recall_m: 0.9876 - val_loss: 0.1241 - val_accuracy: 0.9679 - val_f1_m: 0.9681 - val_precision_m: 0.9693 - val_recall_m: 0.9675 Epoch 25/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0280 - accuracy: 0.9865 - f1_m: 0.9857 - precision_m: 0.9865 - recall_m: 0.9853 - val_loss: 0.1923 - val_accuracy: 0.9703 - val_f1_m: 0.9703 - val_precision_m: 0.9703 - val_recall_m: 0.9703 Epoch 26/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0256 - accuracy: 0.9865 - f1_m: 0.9867 - precision_m: 0.9871 - recall_m: 0.9865 - val_loss: 0.1329 - val_accuracy: 0.9693 - val_f1_m: 0.9693 - val_precision_m: 0.9693 - val_recall_m: 0.9693 Epoch 27/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0300 - accuracy: 0.9823 - f1_m: 0.9822 - precision_m: 0.9829 - recall_m: 0.9818 - val_loss: 0.1358 - val_accuracy: 0.9689 - val_f1_m: 0.9689 - val_precision_m: 0.9689 - val_recall_m: 0.9689 Epoch 28/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0253 - accuracy: 0.9865 - f1_m: 0.9865 - precision_m: 0.9865 - recall_m: 0.9865 - val_loss: 0.1421 - val_accuracy: 0.9712 - val_f1_m: 0.9709 - val_precision_m: 0.9712 - val_recall_m: 0.9708 Epoch 29/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0241 - accuracy: 0.9871 - f1_m: 0.9869 - precision_m: 0.9876 - recall_m: 0.9865 - val_loss: 0.1234 - val_accuracy: 0.9736 - val_f1_m: 0.9733 - val_precision_m: 0.9745 - val_recall_m: 0.9726 Epoch 30/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0251 - accuracy: 0.9841 - f1_m: 0.9841 - precision_m: 0.9841 - recall_m: 0.9841 - val_loss: 0.1233 - 
val_accuracy: 0.9731 - val_f1_m: 0.9728 - val_precision_m: 0.9731 - val_recall_m: 0.9726 Epoch 31/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0255 - accuracy: 0.9865 - f1_m: 0.9851 - precision_m: 0.9871 - recall_m: 0.9841 - val_loss: 0.1297 - val_accuracy: 0.9726 - val_f1_m: 0.9728 - val_precision_m: 0.9731 - val_recall_m: 0.9726 Epoch 32/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0252 - accuracy: 0.9882 - f1_m: 0.9873 - precision_m: 0.9888 - recall_m: 0.9865 - val_loss: 0.1436 - val_accuracy: 0.9684 - val_f1_m: 0.9684 - val_precision_m: 0.9684 - val_recall_m: 0.9684 Epoch 33/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0223 - accuracy: 0.9835 - f1_m: 0.9839 - precision_m: 0.9847 - recall_m: 0.9835 - val_loss: 0.1675 - val_accuracy: 0.9750 - val_f1_m: 0.9748 - val_precision_m: 0.9755 - val_recall_m: 0.9745 Epoch 34/50 850/850 [==============================] - 44s 51ms/step - loss: 0.0253 - accuracy: 0.9876 - f1_m: 0.9873 - precision_m: 0.9876 - recall_m: 0.9871 - val_loss: 0.1481 - val_accuracy: 0.9741 - val_f1_m: 0.9741 - val_precision_m: 0.9741 - val_recall_m: 0.9741 Epoch 35/50 850/850 [==============================] - 45s 52ms/step - loss: 0.0218 - accuracy: 0.9841 - f1_m: 0.9841 - precision_m: 0.9841 - recall_m: 0.9841 - val_loss: 0.1355 - val_accuracy: 0.9736 - val_f1_m: 0.9737 - val_precision_m: 0.9741 - val_recall_m: 0.9736 Epoch 36/50 850/850 [==============================] - 45s 53ms/step - loss: 0.0211 - accuracy: 0.9882 - f1_m: 0.9884 - precision_m: 0.9888 - recall_m: 0.9882 - val_loss: 0.1676 - val_accuracy: 0.9741 - val_f1_m: 0.9741 - val_precision_m: 0.9741 - val_recall_m: 0.9741 Epoch 37/50 850/850 [==============================] - 50s 59ms/step - loss: 0.0250 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 0.1288 - val_accuracy: 0.9708 - val_f1_m: 0.9708 - val_precision_m: 0.9708 - val_recall_m: 0.9708 Epoch 38/50 850/850 
[==============================] - 42s 49ms/step - loss: 0.0250 - accuracy: 0.9841 - f1_m: 0.9843 - precision_m: 0.9847 - recall_m: 0.9841 - val_loss: 0.1463 - val_accuracy: 0.9722 - val_f1_m: 0.9720 - val_precision_m: 0.9726 - val_recall_m: 0.9717 Epoch 39/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0235 - accuracy: 0.9841 - f1_m: 0.9833 - precision_m: 0.9841 - recall_m: 0.9829 - val_loss: 0.1604 - val_accuracy: 0.9741 - val_f1_m: 0.9741 - val_precision_m: 0.9741 - val_recall_m: 0.9741 Epoch 40/50 850/850 [==============================] - 41s 49ms/step - loss: 0.0226 - accuracy: 0.9876 - f1_m: 0.9878 - precision_m: 0.9882 - recall_m: 0.9876 - val_loss: 0.1413 - val_accuracy: 0.9736 - val_f1_m: 0.9733 - val_precision_m: 0.9736 - val_recall_m: 0.9731 Epoch 41/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0198 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 - val_loss: 0.2074 - val_accuracy: 0.9722 - val_f1_m: 0.9722 - val_precision_m: 0.9722 - val_recall_m: 0.9722 Epoch 42/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0237 - accuracy: 0.9876 - f1_m: 0.9871 - precision_m: 0.9871 - recall_m: 0.9871 - val_loss: 0.1830 - val_accuracy: 0.9726 - val_f1_m: 0.9726 - val_precision_m: 0.9726 - val_recall_m: 0.9726 Epoch 43/50 850/850 [==============================] - 42s 50ms/step - loss: 0.0224 - accuracy: 0.9894 - f1_m: 0.9894 - precision_m: 0.9894 - recall_m: 0.9894 - val_loss: 0.1773 - val_accuracy: 0.9722 - val_f1_m: 0.9722 - val_precision_m: 0.9722 - val_recall_m: 0.9722 Epoch 44/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0208 - accuracy: 0.9853 - f1_m: 0.9849 - precision_m: 0.9853 - recall_m: 0.9847 - val_loss: 0.1658 - val_accuracy: 0.9726 - val_f1_m: 0.9726 - val_precision_m: 0.9726 - val_recall_m: 0.9726 Epoch 45/50 850/850 [==============================] - 46s 54ms/step - loss: 0.0205 - accuracy: 0.9871 - f1_m: 0.9873 - precision_m: 
0.9876 - recall_m: 0.9871 - val_loss: 0.1860 - val_accuracy: 0.9703 - val_f1_m: 0.9700 - val_precision_m: 0.9703 - val_recall_m: 0.9698 Epoch 46/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0231 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 0.1345 - val_accuracy: 0.9736 - val_f1_m: 0.9726 - val_precision_m: 0.9736 - val_recall_m: 0.9722 Epoch 47/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0221 - accuracy: 0.9871 - f1_m: 0.9867 - precision_m: 0.9871 - recall_m: 0.9865 - val_loss: 0.1391 - val_accuracy: 0.9712 - val_f1_m: 0.9709 - val_precision_m: 0.9712 - val_recall_m: 0.9708 Epoch 48/50 850/850 [==============================] - 43s 50ms/step - loss: 0.0217 - accuracy: 0.9829 - f1_m: 0.9825 - precision_m: 0.9829 - recall_m: 0.9824 - val_loss: 0.1434 - val_accuracy: 0.9717 - val_f1_m: 0.9720 - val_precision_m: 0.9726 - val_recall_m: 0.9717 Epoch 49/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0217 - accuracy: 0.9865 - f1_m: 0.9863 - precision_m: 0.9871 - recall_m: 0.9859 - val_loss: 0.1322 - val_accuracy: 0.9717 - val_f1_m: 0.9720 - val_precision_m: 0.9726 - val_recall_m: 0.9717 Epoch 50/50 850/850 [==============================] - 42s 49ms/step - loss: 0.0202 - accuracy: 0.9871 - f1_m: 0.9867 - precision_m: 0.9871 - recall_m: 0.9865 - val_loss: 0.1718 - val_accuracy: 0.9731 - val_f1_m: 0.9731 - val_precision_m: 0.9731 - val_recall_m: 0.9731
# Persist the trained Xception model and record its final-epoch validation
# metrics in the shared comparison table via storeResults.
model5.save('xception.h5')

# Take the LAST epoch's validation metrics. Index -1 stays correct if the
# epoch count ever changes or training stops early; the original hard-coded
# [49] silently assumed exactly 50 epochs.
dl_acc = hist5.history["val_accuracy"][-1]
dl_prec = hist5.history["val_precision_m"][-1]
dl_rec = hist5.history["val_recall_m"][-1]
dl_f1 = hist5.history["val_f1_m"][-1]
storeResults('Xception', dl_acc, dl_prec, dl_rec, dl_f1)
# Plot the Xception run's training curves: loss (left) and accuracy (right).
x = hist5  # keep 'x' bound to the history object, as later cells may reuse it
plt.figure(figsize=(20, 10))

# Left panel: training vs. validation loss.
plt.subplot(1, 2, 1)
plt.suptitle('Optimizer : adam', fontsize=10)
plt.ylabel('Loss', fontsize=16)
for key, lbl in (('loss', 'Training Loss'), ('val_loss', 'Validation Loss')):
    plt.plot(x.history[key], label=lbl)
plt.legend(loc='upper right')

# Right panel: training vs. validation accuracy.
plt.subplot(1, 2, 2)
plt.ylabel('Accuracy', fontsize=16)
for key, lbl in (('accuracy', 'Training Accuracy'),
                 ('val_accuracy', 'Validation Accuracy')):
    plt.plot(x.history[key], label=lbl)
plt.legend(loc='lower right')
plt.show()
# NASNetMobile transfer-learning classifier ---------------------------------
# ImageNet-pretrained base with its classification top removed, fed 256x256
# RGB images and used as a feature extractor.
base = NASNetMobile(include_top=False, weights='imagenet', input_shape=(256, 256, 3))

# Collapse the base's spatial feature maps into one vector per image.
x = GlobalAveragePooling2D()(base.output)

# Prediction head: 6-way softmax over the engagement classes.
head = Dense(6, activation='softmax')(x)

# Stitch base and head into a single end-to-end trainable model and compile
# it with SGD plus the custom F1/precision/recall metrics used elsewhere.
model6 = Model(inputs=base.input, outputs=head)
model6.compile(
    optimizer='sgd',
    loss='categorical_crossentropy',
    metrics=["accuracy", f1_m, precision_m, recall_m],
)
model6.summary()
Model: "model_2"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_4 (InputLayer) [(None, 256, 256, 3 0 []
)]
stem_conv1 (Conv2D) (None, 127, 127, 32 864 ['input_4[0][0]']
)
stem_bn1 (BatchNormalization) (None, 127, 127, 32 128 ['stem_conv1[0][0]']
)
activation (Activation) (None, 127, 127, 32 0 ['stem_bn1[0][0]']
)
reduction_conv_1_stem_1 (Conv2 (None, 127, 127, 11 352 ['activation[0][0]']
D) )
reduction_bn_1_stem_1 (BatchNo (None, 127, 127, 11 44 ['reduction_conv_1_stem_1[0][0]']
rmalization) )
activation_1 (Activation) (None, 127, 127, 11 0 ['reduction_bn_1_stem_1[0][0]']
)
activation_3 (Activation) (None, 127, 127, 32 0 ['stem_bn1[0][0]']
)
separable_conv_1_pad_reduction (None, 131, 131, 11 0 ['activation_1[0][0]']
_left1_stem_1 (ZeroPadding2D) )
separable_conv_1_pad_reduction (None, 133, 133, 32 0 ['activation_3[0][0]']
_right1_stem_1 (ZeroPadding2D) )
separable_conv_1_reduction_lef (None, 64, 64, 11) 396 ['separable_conv_1_pad_reduction_
t1_stem_1 (SeparableConv2D) left1_stem_1[0][0]']
separable_conv_1_reduction_rig (None, 64, 64, 11) 1920 ['separable_conv_1_pad_reduction_
ht1_stem_1 (SeparableConv2D) right1_stem_1[0][0]']
separable_conv_1_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_1_reduction_left
left1_stem_1 (BatchNormalizati 1_stem_1[0][0]']
on)
separable_conv_1_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_1_reduction_righ
right1_stem_1 (BatchNormalizat t1_stem_1[0][0]']
ion)
activation_2 (Activation) (None, 64, 64, 11) 0 ['separable_conv_1_bn_reduction_l
eft1_stem_1[0][0]']
activation_4 (Activation) (None, 64, 64, 11) 0 ['separable_conv_1_bn_reduction_r
ight1_stem_1[0][0]']
separable_conv_2_reduction_lef (None, 64, 64, 11) 396 ['activation_2[0][0]']
t1_stem_1 (SeparableConv2D)
separable_conv_2_reduction_rig (None, 64, 64, 11) 660 ['activation_4[0][0]']
ht1_stem_1 (SeparableConv2D)
activation_5 (Activation) (None, 127, 127, 32 0 ['stem_bn1[0][0]']
)
separable_conv_2_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_2_reduction_left
left1_stem_1 (BatchNormalizati 1_stem_1[0][0]']
on)
separable_conv_2_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_2_reduction_righ
right1_stem_1 (BatchNormalizat t1_stem_1[0][0]']
ion)
separable_conv_1_pad_reduction (None, 133, 133, 32 0 ['activation_5[0][0]']
_right2_stem_1 (ZeroPadding2D) )
activation_7 (Activation) (None, 127, 127, 32 0 ['stem_bn1[0][0]']
)
reduction_add_1_stem_1 (Add) (None, 64, 64, 11) 0 ['separable_conv_2_bn_reduction_l
eft1_stem_1[0][0]',
'separable_conv_2_bn_reduction_r
ight1_stem_1[0][0]']
separable_conv_1_reduction_rig (None, 64, 64, 11) 1920 ['separable_conv_1_pad_reduction_
ht2_stem_1 (SeparableConv2D) right2_stem_1[0][0]']
separable_conv_1_pad_reduction (None, 131, 131, 32 0 ['activation_7[0][0]']
_right3_stem_1 (ZeroPadding2D) )
activation_9 (Activation) (None, 64, 64, 11) 0 ['reduction_add_1_stem_1[0][0]']
separable_conv_1_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_1_reduction_righ
right2_stem_1 (BatchNormalizat t2_stem_1[0][0]']
ion)
separable_conv_1_reduction_rig (None, 64, 64, 11) 1152 ['separable_conv_1_pad_reduction_
ht3_stem_1 (SeparableConv2D) right3_stem_1[0][0]']
separable_conv_1_reduction_lef (None, 64, 64, 11) 220 ['activation_9[0][0]']
t4_stem_1 (SeparableConv2D)
activation_6 (Activation) (None, 64, 64, 11) 0 ['separable_conv_1_bn_reduction_r
ight2_stem_1[0][0]']
separable_conv_1_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_1_reduction_righ
right3_stem_1 (BatchNormalizat t3_stem_1[0][0]']
ion)
separable_conv_1_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_1_reduction_left
left4_stem_1 (BatchNormalizati 4_stem_1[0][0]']
on)
reduction_pad_1_stem_1 (ZeroPa (None, 129, 129, 11 0 ['reduction_bn_1_stem_1[0][0]']
dding2D) )
separable_conv_2_reduction_rig (None, 64, 64, 11) 660 ['activation_6[0][0]']
ht2_stem_1 (SeparableConv2D)
activation_8 (Activation) (None, 64, 64, 11) 0 ['separable_conv_1_bn_reduction_r
ight3_stem_1[0][0]']
activation_10 (Activation) (None, 64, 64, 11) 0 ['separable_conv_1_bn_reduction_l
eft4_stem_1[0][0]']
reduction_left2_stem_1 (MaxPoo (None, 64, 64, 11) 0 ['reduction_pad_1_stem_1[0][0]']
ling2D)
separable_conv_2_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_2_reduction_righ
right2_stem_1 (BatchNormalizat t2_stem_1[0][0]']
ion)
separable_conv_2_reduction_rig (None, 64, 64, 11) 396 ['activation_8[0][0]']
ht3_stem_1 (SeparableConv2D)
separable_conv_2_reduction_lef (None, 64, 64, 11) 220 ['activation_10[0][0]']
t4_stem_1 (SeparableConv2D)
adjust_relu_1_stem_2 (Activati (None, 127, 127, 32 0 ['stem_bn1[0][0]']
on) )
reduction_add_2_stem_1 (Add) (None, 64, 64, 11) 0 ['reduction_left2_stem_1[0][0]',
'separable_conv_2_bn_reduction_r
ight2_stem_1[0][0]']
reduction_left3_stem_1 (Averag (None, 64, 64, 11) 0 ['reduction_pad_1_stem_1[0][0]']
ePooling2D)
separable_conv_2_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_2_reduction_righ
right3_stem_1 (BatchNormalizat t3_stem_1[0][0]']
ion)
reduction_left4_stem_1 (Averag (None, 64, 64, 11) 0 ['reduction_add_1_stem_1[0][0]']
ePooling2D)
separable_conv_2_bn_reduction_ (None, 64, 64, 11) 44 ['separable_conv_2_reduction_left
left4_stem_1 (BatchNormalizati 4_stem_1[0][0]']
on)
reduction_right5_stem_1 (MaxPo (None, 64, 64, 11) 0 ['reduction_pad_1_stem_1[0][0]']
oling2D)
zero_padding2d (ZeroPadding2D) (None, 128, 128, 32 0 ['adjust_relu_1_stem_2[0][0]']
)
reduction_add3_stem_1 (Add) (None, 64, 64, 11) 0 ['reduction_left3_stem_1[0][0]',
'separable_conv_2_bn_reduction_r
ight3_stem_1[0][0]']
add_24 (Add) (None, 64, 64, 11) 0 ['reduction_add_2_stem_1[0][0]',
'reduction_left4_stem_1[0][0]']
reduction_add4_stem_1 (Add) (None, 64, 64, 11) 0 ['separable_conv_2_bn_reduction_l
eft4_stem_1[0][0]',
'reduction_right5_stem_1[0][0]']
cropping2d (Cropping2D) (None, 127, 127, 32 0 ['zero_padding2d[0][0]']
)
reduction_concat_stem_1 (Conca (None, 64, 64, 44) 0 ['reduction_add_2_stem_1[0][0]',
tenate) 'reduction_add3_stem_1[0][0]',
'add_24[0][0]',
'reduction_add4_stem_1[0][0]']
adjust_avg_pool_1_stem_2 (Aver (None, 64, 64, 32) 0 ['adjust_relu_1_stem_2[0][0]']
agePooling2D)
adjust_avg_pool_2_stem_2 (Aver (None, 64, 64, 32) 0 ['cropping2d[0][0]']
agePooling2D)
activation_11 (Activation) (None, 64, 64, 44) 0 ['reduction_concat_stem_1[0][0]']
adjust_conv_1_stem_2 (Conv2D) (None, 64, 64, 11) 352 ['adjust_avg_pool_1_stem_2[0][0]'
]
adjust_conv_2_stem_2 (Conv2D) (None, 64, 64, 11) 352 ['adjust_avg_pool_2_stem_2[0][0]'
]
reduction_conv_1_stem_2 (Conv2 (None, 64, 64, 22) 968 ['activation_11[0][0]']
D)
concatenate (Concatenate) (None, 64, 64, 22) 0 ['adjust_conv_1_stem_2[0][0]',
'adjust_conv_2_stem_2[0][0]']
reduction_bn_1_stem_2 (BatchNo (None, 64, 64, 22) 88 ['reduction_conv_1_stem_2[0][0]']
rmalization)
adjust_bn_stem_2 (BatchNormali (None, 64, 64, 22) 88 ['concatenate[0][0]']
zation)
activation_12 (Activation) (None, 64, 64, 22) 0 ['reduction_bn_1_stem_2[0][0]']
activation_14 (Activation) (None, 64, 64, 22) 0 ['adjust_bn_stem_2[0][0]']
separable_conv_1_pad_reduction (None, 67, 67, 22) 0 ['activation_12[0][0]']
_left1_stem_2 (ZeroPadding2D)
separable_conv_1_pad_reduction (None, 69, 69, 22) 0 ['activation_14[0][0]']
_right1_stem_2 (ZeroPadding2D)
separable_conv_1_reduction_lef (None, 32, 32, 22) 1034 ['separable_conv_1_pad_reduction_
t1_stem_2 (SeparableConv2D) left1_stem_2[0][0]']
separable_conv_1_reduction_rig (None, 32, 32, 22) 1562 ['separable_conv_1_pad_reduction_
ht1_stem_2 (SeparableConv2D) right1_stem_2[0][0]']
separable_conv_1_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_1_reduction_left
left1_stem_2 (BatchNormalizati 1_stem_2[0][0]']
on)
separable_conv_1_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_1_reduction_righ
right1_stem_2 (BatchNormalizat t1_stem_2[0][0]']
ion)
activation_13 (Activation) (None, 32, 32, 22) 0 ['separable_conv_1_bn_reduction_l
eft1_stem_2[0][0]']
activation_15 (Activation) (None, 32, 32, 22) 0 ['separable_conv_1_bn_reduction_r
ight1_stem_2[0][0]']
separable_conv_2_reduction_lef (None, 32, 32, 22) 1034 ['activation_13[0][0]']
t1_stem_2 (SeparableConv2D)
separable_conv_2_reduction_rig (None, 32, 32, 22) 1562 ['activation_15[0][0]']
ht1_stem_2 (SeparableConv2D)
activation_16 (Activation) (None, 64, 64, 22) 0 ['adjust_bn_stem_2[0][0]']
separable_conv_2_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_2_reduction_left
left1_stem_2 (BatchNormalizati 1_stem_2[0][0]']
on)
separable_conv_2_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_2_reduction_righ
right1_stem_2 (BatchNormalizat t1_stem_2[0][0]']
ion)
separable_conv_1_pad_reduction (None, 69, 69, 22) 0 ['activation_16[0][0]']
_right2_stem_2 (ZeroPadding2D)
activation_18 (Activation) (None, 64, 64, 22) 0 ['adjust_bn_stem_2[0][0]']
reduction_add_1_stem_2 (Add) (None, 32, 32, 22) 0 ['separable_conv_2_bn_reduction_l
eft1_stem_2[0][0]',
'separable_conv_2_bn_reduction_r
ight1_stem_2[0][0]']
separable_conv_1_reduction_rig (None, 32, 32, 22) 1562 ['separable_conv_1_pad_reduction_
ht2_stem_2 (SeparableConv2D) right2_stem_2[0][0]']
separable_conv_1_pad_reduction (None, 67, 67, 22) 0 ['activation_18[0][0]']
_right3_stem_2 (ZeroPadding2D)
activation_20 (Activation) (None, 32, 32, 22) 0 ['reduction_add_1_stem_2[0][0]']
separable_conv_1_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_1_reduction_righ
right2_stem_2 (BatchNormalizat t2_stem_2[0][0]']
ion)
separable_conv_1_reduction_rig (None, 32, 32, 22) 1034 ['separable_conv_1_pad_reduction_
ht3_stem_2 (SeparableConv2D) right3_stem_2[0][0]']
separable_conv_1_reduction_lef (None, 32, 32, 22) 682 ['activation_20[0][0]']
t4_stem_2 (SeparableConv2D)
activation_17 (Activation) (None, 32, 32, 22) 0 ['separable_conv_1_bn_reduction_r
ight2_stem_2[0][0]']
separable_conv_1_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_1_reduction_righ
right3_stem_2 (BatchNormalizat t3_stem_2[0][0]']
ion)
separable_conv_1_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_1_reduction_left
left4_stem_2 (BatchNormalizati 4_stem_2[0][0]']
on)
reduction_pad_1_stem_2 (ZeroPa (None, 65, 65, 22) 0 ['reduction_bn_1_stem_2[0][0]']
dding2D)
separable_conv_2_reduction_rig (None, 32, 32, 22) 1562 ['activation_17[0][0]']
ht2_stem_2 (SeparableConv2D)
activation_19 (Activation) (None, 32, 32, 22) 0 ['separable_conv_1_bn_reduction_r
ight3_stem_2[0][0]']
activation_21 (Activation) (None, 32, 32, 22) 0 ['separable_conv_1_bn_reduction_l
eft4_stem_2[0][0]']
reduction_left2_stem_2 (MaxPoo (None, 32, 32, 22) 0 ['reduction_pad_1_stem_2[0][0]']
ling2D)
separable_conv_2_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_2_reduction_righ
right2_stem_2 (BatchNormalizat t2_stem_2[0][0]']
ion)
separable_conv_2_reduction_rig (None, 32, 32, 22) 1034 ['activation_19[0][0]']
ht3_stem_2 (SeparableConv2D)
separable_conv_2_reduction_lef (None, 32, 32, 22) 682 ['activation_21[0][0]']
t4_stem_2 (SeparableConv2D)
adjust_relu_1_0 (Activation) (None, 64, 64, 44) 0 ['reduction_concat_stem_1[0][0]']
reduction_add_2_stem_2 (Add) (None, 32, 32, 22) 0 ['reduction_left2_stem_2[0][0]',
'separable_conv_2_bn_reduction_r
ight2_stem_2[0][0]']
reduction_left3_stem_2 (Averag (None, 32, 32, 22) 0 ['reduction_pad_1_stem_2[0][0]']
ePooling2D)
separable_conv_2_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_2_reduction_righ
right3_stem_2 (BatchNormalizat t3_stem_2[0][0]']
ion)
reduction_left4_stem_2 (Averag (None, 32, 32, 22) 0 ['reduction_add_1_stem_2[0][0]']
ePooling2D)
separable_conv_2_bn_reduction_ (None, 32, 32, 22) 88 ['separable_conv_2_reduction_left
left4_stem_2 (BatchNormalizati 4_stem_2[0][0]']
on)
reduction_right5_stem_2 (MaxPo (None, 32, 32, 22) 0 ['reduction_pad_1_stem_2[0][0]']
oling2D)
zero_padding2d_1 (ZeroPadding2 (None, 65, 65, 44) 0 ['adjust_relu_1_0[0][0]']
D)
reduction_add3_stem_2 (Add) (None, 32, 32, 22) 0 ['reduction_left3_stem_2[0][0]',
'separable_conv_2_bn_reduction_r
ight3_stem_2[0][0]']
add_25 (Add) (None, 32, 32, 22) 0 ['reduction_add_2_stem_2[0][0]',
'reduction_left4_stem_2[0][0]']
reduction_add4_stem_2 (Add) (None, 32, 32, 22) 0 ['separable_conv_2_bn_reduction_l
eft4_stem_2[0][0]',
'reduction_right5_stem_2[0][0]']
cropping2d_1 (Cropping2D) (None, 64, 64, 44) 0 ['zero_padding2d_1[0][0]']
reduction_concat_stem_2 (Conca (None, 32, 32, 88) 0 ['reduction_add_2_stem_2[0][0]',
tenate) 'reduction_add3_stem_2[0][0]',
'add_25[0][0]',
'reduction_add4_stem_2[0][0]']
adjust_avg_pool_1_0 (AveragePo (None, 32, 32, 44) 0 ['adjust_relu_1_0[0][0]']
oling2D)
adjust_avg_pool_2_0 (AveragePo (None, 32, 32, 44) 0 ['cropping2d_1[0][0]']
oling2D)
adjust_conv_1_0 (Conv2D) (None, 32, 32, 22) 968 ['adjust_avg_pool_1_0[0][0]']
adjust_conv_2_0 (Conv2D) (None, 32, 32, 22) 968 ['adjust_avg_pool_2_0[0][0]']
activation_22 (Activation) (None, 32, 32, 88) 0 ['reduction_concat_stem_2[0][0]']
concatenate_1 (Concatenate) (None, 32, 32, 44) 0 ['adjust_conv_1_0[0][0]',
'adjust_conv_2_0[0][0]']
normal_conv_1_0 (Conv2D) (None, 32, 32, 44) 3872 ['activation_22[0][0]']
adjust_bn_0 (BatchNormalizatio (None, 32, 32, 44) 176 ['concatenate_1[0][0]']
n)
normal_bn_1_0 (BatchNormalizat (None, 32, 32, 44) 176 ['normal_conv_1_0[0][0]']
ion)
activation_23 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_0[0][0]']
activation_25 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_0[0][0]']
activation_27 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_0[0][0]']
activation_29 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_0[0][0]']
activation_31 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_0[0][0]']
separable_conv_1_normal_left1_ (None, 32, 32, 44) 3036 ['activation_23[0][0]']
0 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 32, 32, 44) 2332 ['activation_25[0][0]']
_0 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 32, 32, 44) 3036 ['activation_27[0][0]']
0 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 32, 32, 44) 2332 ['activation_29[0][0]']
_0 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 32, 32, 44) 2332 ['activation_31[0][0]']
0 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left1_0
t1_0 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right1_
ht1_0 (BatchNormalization) 0[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left2_0
t2_0 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right2_
ht2_0 (BatchNormalization) 0[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left5_0
t5_0 (BatchNormalization) [0][0]']
activation_24 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
1_0[0][0]']
activation_26 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t1_0[0][0]']
activation_28 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
2_0[0][0]']
activation_30 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t2_0[0][0]']
activation_32 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
5_0[0][0]']
separable_conv_2_normal_left1_ (None, 32, 32, 44) 3036 ['activation_24[0][0]']
0 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 32, 32, 44) 2332 ['activation_26[0][0]']
_0 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 32, 32, 44) 3036 ['activation_28[0][0]']
0 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 32, 32, 44) 2332 ['activation_30[0][0]']
_0 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 32, 32, 44) 2332 ['activation_32[0][0]']
0 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left1_0
t1_0 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right1_
ht1_0 (BatchNormalization) 0[0][0]']
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left2_0
t2_0 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right2_
ht2_0 (BatchNormalization) 0[0][0]']
normal_left3_0 (AveragePooling (None, 32, 32, 44) 0 ['normal_bn_1_0[0][0]']
2D)
normal_left4_0 (AveragePooling (None, 32, 32, 44) 0 ['adjust_bn_0[0][0]']
2D)
normal_right4_0 (AveragePoolin (None, 32, 32, 44) 0 ['adjust_bn_0[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left5_0
t5_0 (BatchNormalization) [0][0]']
normal_add_1_0 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
1_0[0][0]',
'separable_conv_2_bn_normal_righ
t1_0[0][0]']
normal_add_2_0 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
2_0[0][0]',
'separable_conv_2_bn_normal_righ
t2_0[0][0]']
normal_add_3_0 (Add) (None, 32, 32, 44) 0 ['normal_left3_0[0][0]',
'adjust_bn_0[0][0]']
normal_add_4_0 (Add) (None, 32, 32, 44) 0 ['normal_left4_0[0][0]',
'normal_right4_0[0][0]']
normal_add_5_0 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
5_0[0][0]',
'normal_bn_1_0[0][0]']
normal_concat_0 (Concatenate) (None, 32, 32, 264) 0 ['adjust_bn_0[0][0]',
'normal_add_1_0[0][0]',
'normal_add_2_0[0][0]',
'normal_add_3_0[0][0]',
'normal_add_4_0[0][0]',
'normal_add_5_0[0][0]']
activation_33 (Activation) (None, 32, 32, 88) 0 ['reduction_concat_stem_2[0][0]']
activation_34 (Activation) (None, 32, 32, 264) 0 ['normal_concat_0[0][0]']
adjust_conv_projection_1 (Conv (None, 32, 32, 44) 3872 ['activation_33[0][0]']
2D)
normal_conv_1_1 (Conv2D) (None, 32, 32, 44) 11616 ['activation_34[0][0]']
adjust_bn_1 (BatchNormalizatio (None, 32, 32, 44) 176 ['adjust_conv_projection_1[0][0]'
n) ]
normal_bn_1_1 (BatchNormalizat (None, 32, 32, 44) 176 ['normal_conv_1_1[0][0]']
ion)
activation_35 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_1[0][0]']
activation_37 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_1[0][0]']
activation_39 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_1[0][0]']
activation_41 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_1[0][0]']
activation_43 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_1[0][0]']
separable_conv_1_normal_left1_ (None, 32, 32, 44) 3036 ['activation_35[0][0]']
1 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 32, 32, 44) 2332 ['activation_37[0][0]']
_1 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 32, 32, 44) 3036 ['activation_39[0][0]']
1 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 32, 32, 44) 2332 ['activation_41[0][0]']
_1 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 32, 32, 44) 2332 ['activation_43[0][0]']
1 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left1_1
t1_1 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right1_
ht1_1 (BatchNormalization) 1[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left2_1
t2_1 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right2_
ht2_1 (BatchNormalization) 1[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left5_1
t5_1 (BatchNormalization) [0][0]']
activation_36 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
1_1[0][0]']
activation_38 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t1_1[0][0]']
activation_40 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
2_1[0][0]']
activation_42 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t2_1[0][0]']
activation_44 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
5_1[0][0]']
separable_conv_2_normal_left1_ (None, 32, 32, 44) 3036 ['activation_36[0][0]']
1 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 32, 32, 44) 2332 ['activation_38[0][0]']
_1 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 32, 32, 44) 3036 ['activation_40[0][0]']
1 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 32, 32, 44) 2332 ['activation_42[0][0]']
_1 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 32, 32, 44) 2332 ['activation_44[0][0]']
1 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left1_1
t1_1 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right1_
ht1_1 (BatchNormalization) 1[0][0]']
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left2_1
t2_1 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right2_
ht2_1 (BatchNormalization) 1[0][0]']
normal_left3_1 (AveragePooling (None, 32, 32, 44) 0 ['normal_bn_1_1[0][0]']
2D)
normal_left4_1 (AveragePooling (None, 32, 32, 44) 0 ['adjust_bn_1[0][0]']
2D)
normal_right4_1 (AveragePoolin (None, 32, 32, 44) 0 ['adjust_bn_1[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left5_1
t5_1 (BatchNormalization) [0][0]']
normal_add_1_1 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
1_1[0][0]',
'separable_conv_2_bn_normal_righ
t1_1[0][0]']
normal_add_2_1 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
2_1[0][0]',
'separable_conv_2_bn_normal_righ
t2_1[0][0]']
normal_add_3_1 (Add) (None, 32, 32, 44) 0 ['normal_left3_1[0][0]',
'adjust_bn_1[0][0]']
normal_add_4_1 (Add) (None, 32, 32, 44) 0 ['normal_left4_1[0][0]',
'normal_right4_1[0][0]']
normal_add_5_1 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
5_1[0][0]',
'normal_bn_1_1[0][0]']
normal_concat_1 (Concatenate) (None, 32, 32, 264) 0 ['adjust_bn_1[0][0]',
'normal_add_1_1[0][0]',
'normal_add_2_1[0][0]',
'normal_add_3_1[0][0]',
'normal_add_4_1[0][0]',
'normal_add_5_1[0][0]']
activation_45 (Activation) (None, 32, 32, 264) 0 ['normal_concat_0[0][0]']
activation_46 (Activation) (None, 32, 32, 264) 0 ['normal_concat_1[0][0]']
adjust_conv_projection_2 (Conv (None, 32, 32, 44) 11616 ['activation_45[0][0]']
2D)
normal_conv_1_2 (Conv2D) (None, 32, 32, 44) 11616 ['activation_46[0][0]']
adjust_bn_2 (BatchNormalizatio (None, 32, 32, 44) 176 ['adjust_conv_projection_2[0][0]'
n) ]
normal_bn_1_2 (BatchNormalizat (None, 32, 32, 44) 176 ['normal_conv_1_2[0][0]']
ion)
activation_47 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_2[0][0]']
activation_49 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_2[0][0]']
activation_51 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_2[0][0]']
activation_53 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_2[0][0]']
activation_55 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_2[0][0]']
separable_conv_1_normal_left1_ (None, 32, 32, 44) 3036 ['activation_47[0][0]']
2 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 32, 32, 44) 2332 ['activation_49[0][0]']
_2 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 32, 32, 44) 3036 ['activation_51[0][0]']
2 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 32, 32, 44) 2332 ['activation_53[0][0]']
_2 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 32, 32, 44) 2332 ['activation_55[0][0]']
2 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left1_2
t1_2 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right1_
ht1_2 (BatchNormalization) 2[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left2_2
t2_2 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right2_
ht2_2 (BatchNormalization) 2[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left5_2
t5_2 (BatchNormalization) [0][0]']
activation_48 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
1_2[0][0]']
activation_50 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t1_2[0][0]']
activation_52 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
2_2[0][0]']
activation_54 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t2_2[0][0]']
activation_56 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
5_2[0][0]']
separable_conv_2_normal_left1_ (None, 32, 32, 44) 3036 ['activation_48[0][0]']
2 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 32, 32, 44) 2332 ['activation_50[0][0]']
_2 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 32, 32, 44) 3036 ['activation_52[0][0]']
2 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 32, 32, 44) 2332 ['activation_54[0][0]']
_2 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 32, 32, 44) 2332 ['activation_56[0][0]']
2 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left1_2
t1_2 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right1_
ht1_2 (BatchNormalization) 2[0][0]']
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left2_2
t2_2 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right2_
ht2_2 (BatchNormalization) 2[0][0]']
normal_left3_2 (AveragePooling (None, 32, 32, 44) 0 ['normal_bn_1_2[0][0]']
2D)
normal_left4_2 (AveragePooling (None, 32, 32, 44) 0 ['adjust_bn_2[0][0]']
2D)
normal_right4_2 (AveragePoolin (None, 32, 32, 44) 0 ['adjust_bn_2[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left5_2
t5_2 (BatchNormalization) [0][0]']
normal_add_1_2 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
1_2[0][0]',
'separable_conv_2_bn_normal_righ
t1_2[0][0]']
normal_add_2_2 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
2_2[0][0]',
'separable_conv_2_bn_normal_righ
t2_2[0][0]']
normal_add_3_2 (Add) (None, 32, 32, 44) 0 ['normal_left3_2[0][0]',
'adjust_bn_2[0][0]']
normal_add_4_2 (Add) (None, 32, 32, 44) 0 ['normal_left4_2[0][0]',
'normal_right4_2[0][0]']
normal_add_5_2 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
5_2[0][0]',
'normal_bn_1_2[0][0]']
normal_concat_2 (Concatenate) (None, 32, 32, 264) 0 ['adjust_bn_2[0][0]',
'normal_add_1_2[0][0]',
'normal_add_2_2[0][0]',
'normal_add_3_2[0][0]',
'normal_add_4_2[0][0]',
'normal_add_5_2[0][0]']
activation_57 (Activation) (None, 32, 32, 264) 0 ['normal_concat_1[0][0]']
activation_58 (Activation) (None, 32, 32, 264) 0 ['normal_concat_2[0][0]']
adjust_conv_projection_3 (Conv (None, 32, 32, 44) 11616 ['activation_57[0][0]']
2D)
normal_conv_1_3 (Conv2D) (None, 32, 32, 44) 11616 ['activation_58[0][0]']
adjust_bn_3 (BatchNormalizatio (None, 32, 32, 44) 176 ['adjust_conv_projection_3[0][0]'
n) ]
normal_bn_1_3 (BatchNormalizat (None, 32, 32, 44) 176 ['normal_conv_1_3[0][0]']
ion)
activation_59 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_3[0][0]']
activation_61 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_3[0][0]']
activation_63 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_3[0][0]']
activation_65 (Activation) (None, 32, 32, 44) 0 ['adjust_bn_3[0][0]']
activation_67 (Activation) (None, 32, 32, 44) 0 ['normal_bn_1_3[0][0]']
separable_conv_1_normal_left1_ (None, 32, 32, 44) 3036 ['activation_59[0][0]']
3 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 32, 32, 44) 2332 ['activation_61[0][0]']
_3 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 32, 32, 44) 3036 ['activation_63[0][0]']
3 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 32, 32, 44) 2332 ['activation_65[0][0]']
_3 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 32, 32, 44) 2332 ['activation_67[0][0]']
3 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left1_3
t1_3 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right1_
ht1_3 (BatchNormalization) 3[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left2_3
t2_3 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_1_normal_right2_
ht2_3 (BatchNormalization) 3[0][0]']
separable_conv_1_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_1_normal_left5_3
t5_3 (BatchNormalization) [0][0]']
activation_60 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
1_3[0][0]']
activation_62 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t1_3[0][0]']
activation_64 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
2_3[0][0]']
activation_66 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_righ
t2_3[0][0]']
activation_68 (Activation) (None, 32, 32, 44) 0 ['separable_conv_1_bn_normal_left
5_3[0][0]']
separable_conv_2_normal_left1_ (None, 32, 32, 44) 3036 ['activation_60[0][0]']
3 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 32, 32, 44) 2332 ['activation_62[0][0]']
_3 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 32, 32, 44) 3036 ['activation_64[0][0]']
3 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 32, 32, 44) 2332 ['activation_66[0][0]']
_3 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 32, 32, 44) 2332 ['activation_68[0][0]']
3 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left1_3
t1_3 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right1_
ht1_3 (BatchNormalization) 3[0][0]']
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left2_3
t2_3 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 32, 32, 44) 176 ['separable_conv_2_normal_right2_
ht2_3 (BatchNormalization) 3[0][0]']
normal_left3_3 (AveragePooling (None, 32, 32, 44) 0 ['normal_bn_1_3[0][0]']
2D)
normal_left4_3 (AveragePooling (None, 32, 32, 44) 0 ['adjust_bn_3[0][0]']
2D)
normal_right4_3 (AveragePoolin (None, 32, 32, 44) 0 ['adjust_bn_3[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 32, 32, 44) 176 ['separable_conv_2_normal_left5_3
t5_3 (BatchNormalization) [0][0]']
normal_add_1_3 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
1_3[0][0]',
'separable_conv_2_bn_normal_righ
t1_3[0][0]']
normal_add_2_3 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
2_3[0][0]',
'separable_conv_2_bn_normal_righ
t2_3[0][0]']
normal_add_3_3 (Add) (None, 32, 32, 44) 0 ['normal_left3_3[0][0]',
'adjust_bn_3[0][0]']
normal_add_4_3 (Add) (None, 32, 32, 44) 0 ['normal_left4_3[0][0]',
'normal_right4_3[0][0]']
normal_add_5_3 (Add) (None, 32, 32, 44) 0 ['separable_conv_2_bn_normal_left
5_3[0][0]',
'normal_bn_1_3[0][0]']
normal_concat_3 (Concatenate) (None, 32, 32, 264) 0 ['adjust_bn_3[0][0]',
'normal_add_1_3[0][0]',
'normal_add_2_3[0][0]',
'normal_add_3_3[0][0]',
'normal_add_4_3[0][0]',
'normal_add_5_3[0][0]']
activation_70 (Activation) (None, 32, 32, 264) 0 ['normal_concat_3[0][0]']
activation_69 (Activation) (None, 32, 32, 264) 0 ['normal_concat_2[0][0]']
reduction_conv_1_reduce_4 (Con (None, 32, 32, 88) 23232 ['activation_70[0][0]']
v2D)
adjust_conv_projection_reduce_ (None, 32, 32, 88) 23232 ['activation_69[0][0]']
4 (Conv2D)
reduction_bn_1_reduce_4 (Batch (None, 32, 32, 88) 352 ['reduction_conv_1_reduce_4[0][0]
Normalization) ']
adjust_bn_reduce_4 (BatchNorma (None, 32, 32, 88) 352 ['adjust_conv_projection_reduce_4
lization) [0][0]']
activation_71 (Activation) (None, 32, 32, 88) 0 ['reduction_bn_1_reduce_4[0][0]']
activation_73 (Activation) (None, 32, 32, 88) 0 ['adjust_bn_reduce_4[0][0]']
separable_conv_1_pad_reduction (None, 35, 35, 88) 0 ['activation_71[0][0]']
_left1_reduce_4 (ZeroPadding2D
)
separable_conv_1_pad_reduction (None, 37, 37, 88) 0 ['activation_73[0][0]']
_right1_reduce_4 (ZeroPadding2
D)
separable_conv_1_reduction_lef (None, 16, 16, 88) 9944 ['separable_conv_1_pad_reduction_
t1_reduce_4 (SeparableConv2D) left1_reduce_4[0][0]']
separable_conv_1_reduction_rig (None, 16, 16, 88) 12056 ['separable_conv_1_pad_reduction_
ht1_reduce_4 (SeparableConv2D) right1_reduce_4[0][0]']
separable_conv_1_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_1_reduction_left
left1_reduce_4 (BatchNormaliza 1_reduce_4[0][0]']
tion)
separable_conv_1_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_1_reduction_righ
right1_reduce_4 (BatchNormaliz t1_reduce_4[0][0]']
ation)
activation_72 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_reduction_l
eft1_reduce_4[0][0]']
activation_74 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_reduction_r
ight1_reduce_4[0][0]']
separable_conv_2_reduction_lef (None, 16, 16, 88) 9944 ['activation_72[0][0]']
t1_reduce_4 (SeparableConv2D)
separable_conv_2_reduction_rig (None, 16, 16, 88) 12056 ['activation_74[0][0]']
ht1_reduce_4 (SeparableConv2D)
activation_75 (Activation) (None, 32, 32, 88) 0 ['adjust_bn_reduce_4[0][0]']
separable_conv_2_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_2_reduction_left
left1_reduce_4 (BatchNormaliza 1_reduce_4[0][0]']
tion)
separable_conv_2_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_2_reduction_righ
right1_reduce_4 (BatchNormaliz t1_reduce_4[0][0]']
ation)
separable_conv_1_pad_reduction (None, 37, 37, 88) 0 ['activation_75[0][0]']
_right2_reduce_4 (ZeroPadding2
D)
activation_77 (Activation) (None, 32, 32, 88) 0 ['adjust_bn_reduce_4[0][0]']
reduction_add_1_reduce_4 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_reduction_l
eft1_reduce_4[0][0]',
'separable_conv_2_bn_reduction_r
ight1_reduce_4[0][0]']
separable_conv_1_reduction_rig (None, 16, 16, 88) 12056 ['separable_conv_1_pad_reduction_
ht2_reduce_4 (SeparableConv2D) right2_reduce_4[0][0]']
separable_conv_1_pad_reduction (None, 35, 35, 88) 0 ['activation_77[0][0]']
_right3_reduce_4 (ZeroPadding2
D)
activation_79 (Activation) (None, 16, 16, 88) 0 ['reduction_add_1_reduce_4[0][0]'
]
separable_conv_1_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_1_reduction_righ
right2_reduce_4 (BatchNormaliz t2_reduce_4[0][0]']
ation)
separable_conv_1_reduction_rig (None, 16, 16, 88) 9944 ['separable_conv_1_pad_reduction_
ht3_reduce_4 (SeparableConv2D) right3_reduce_4[0][0]']
separable_conv_1_reduction_lef (None, 16, 16, 88) 8536 ['activation_79[0][0]']
t4_reduce_4 (SeparableConv2D)
activation_76 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_reduction_r
ight2_reduce_4[0][0]']
separable_conv_1_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_1_reduction_righ
right3_reduce_4 (BatchNormaliz t3_reduce_4[0][0]']
ation)
separable_conv_1_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_1_reduction_left
left4_reduce_4 (BatchNormaliza 4_reduce_4[0][0]']
tion)
reduction_pad_1_reduce_4 (Zero (None, 33, 33, 88) 0 ['reduction_bn_1_reduce_4[0][0]']
Padding2D)
separable_conv_2_reduction_rig (None, 16, 16, 88) 12056 ['activation_76[0][0]']
ht2_reduce_4 (SeparableConv2D)
activation_78 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_reduction_r
ight3_reduce_4[0][0]']
activation_80 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_reduction_l
eft4_reduce_4[0][0]']
reduction_left2_reduce_4 (MaxP (None, 16, 16, 88) 0 ['reduction_pad_1_reduce_4[0][0]'
ooling2D) ]
separable_conv_2_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_2_reduction_righ
right2_reduce_4 (BatchNormaliz t2_reduce_4[0][0]']
ation)
separable_conv_2_reduction_rig (None, 16, 16, 88) 9944 ['activation_78[0][0]']
ht3_reduce_4 (SeparableConv2D)
separable_conv_2_reduction_lef (None, 16, 16, 88) 8536 ['activation_80[0][0]']
t4_reduce_4 (SeparableConv2D)
adjust_relu_1_5 (Activation) (None, 32, 32, 264) 0 ['normal_concat_3[0][0]']
reduction_add_2_reduce_4 (Add) (None, 16, 16, 88) 0 ['reduction_left2_reduce_4[0][0]'
, 'separable_conv_2_bn_reduction_
right2_reduce_4[0][0]']
reduction_left3_reduce_4 (Aver (None, 16, 16, 88) 0 ['reduction_pad_1_reduce_4[0][0]'
agePooling2D) ]
separable_conv_2_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_2_reduction_righ
right3_reduce_4 (BatchNormaliz t3_reduce_4[0][0]']
ation)
reduction_left4_reduce_4 (Aver (None, 16, 16, 88) 0 ['reduction_add_1_reduce_4[0][0]'
agePooling2D) ]
separable_conv_2_bn_reduction_ (None, 16, 16, 88) 352 ['separable_conv_2_reduction_left
left4_reduce_4 (BatchNormaliza 4_reduce_4[0][0]']
tion)
reduction_right5_reduce_4 (Max (None, 16, 16, 88) 0 ['reduction_pad_1_reduce_4[0][0]'
Pooling2D) ]
zero_padding2d_2 (ZeroPadding2 (None, 33, 33, 264) 0 ['adjust_relu_1_5[0][0]']
D)
reduction_add3_reduce_4 (Add) (None, 16, 16, 88) 0 ['reduction_left3_reduce_4[0][0]'
, 'separable_conv_2_bn_reduction_
right3_reduce_4[0][0]']
add_26 (Add) (None, 16, 16, 88) 0 ['reduction_add_2_reduce_4[0][0]'
, 'reduction_left4_reduce_4[0][0]
']
reduction_add4_reduce_4 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_reduction_l
eft4_reduce_4[0][0]',
'reduction_right5_reduce_4[0][0]
']
cropping2d_2 (Cropping2D) (None, 32, 32, 264) 0 ['zero_padding2d_2[0][0]']
reduction_concat_reduce_4 (Con (None, 16, 16, 352) 0 ['reduction_add_2_reduce_4[0][0]'
catenate) , 'reduction_add3_reduce_4[0][0]'
, 'add_26[0][0]',
'reduction_add4_reduce_4[0][0]']
adjust_avg_pool_1_5 (AveragePo (None, 16, 16, 264) 0 ['adjust_relu_1_5[0][0]']
oling2D)
adjust_avg_pool_2_5 (AveragePo (None, 16, 16, 264) 0 ['cropping2d_2[0][0]']
oling2D)
adjust_conv_1_5 (Conv2D) (None, 16, 16, 44) 11616 ['adjust_avg_pool_1_5[0][0]']
adjust_conv_2_5 (Conv2D) (None, 16, 16, 44) 11616 ['adjust_avg_pool_2_5[0][0]']
activation_81 (Activation) (None, 16, 16, 352) 0 ['reduction_concat_reduce_4[0][0]
']
concatenate_2 (Concatenate) (None, 16, 16, 88) 0 ['adjust_conv_1_5[0][0]',
'adjust_conv_2_5[0][0]']
normal_conv_1_5 (Conv2D) (None, 16, 16, 88) 30976 ['activation_81[0][0]']
adjust_bn_5 (BatchNormalizatio (None, 16, 16, 88) 352 ['concatenate_2[0][0]']
n)
normal_bn_1_5 (BatchNormalizat (None, 16, 16, 88) 352 ['normal_conv_1_5[0][0]']
ion)
activation_82 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_5[0][0]']
activation_84 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_5[0][0]']
activation_86 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_5[0][0]']
activation_88 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_5[0][0]']
activation_90 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_5[0][0]']
separable_conv_1_normal_left1_ (None, 16, 16, 88) 9944 ['activation_82[0][0]']
5 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 16, 16, 88) 8536 ['activation_84[0][0]']
_5 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 16, 16, 88) 9944 ['activation_86[0][0]']
5 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 16, 16, 88) 8536 ['activation_88[0][0]']
_5 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 16, 16, 88) 8536 ['activation_90[0][0]']
5 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left1_5
t1_5 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right1_
ht1_5 (BatchNormalization) 5[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left2_5
t2_5 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right2_
ht2_5 (BatchNormalization) 5[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left5_5
t5_5 (BatchNormalization) [0][0]']
activation_83 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
1_5[0][0]']
activation_85 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t1_5[0][0]']
activation_87 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
2_5[0][0]']
activation_89 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t2_5[0][0]']
activation_91 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
5_5[0][0]']
separable_conv_2_normal_left1_ (None, 16, 16, 88) 9944 ['activation_83[0][0]']
5 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 16, 16, 88) 8536 ['activation_85[0][0]']
_5 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 16, 16, 88) 9944 ['activation_87[0][0]']
5 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 16, 16, 88) 8536 ['activation_89[0][0]']
_5 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 16, 16, 88) 8536 ['activation_91[0][0]']
5 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left1_5
t1_5 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right1_
ht1_5 (BatchNormalization) 5[0][0]']
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left2_5
t2_5 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right2_
ht2_5 (BatchNormalization) 5[0][0]']
normal_left3_5 (AveragePooling (None, 16, 16, 88) 0 ['normal_bn_1_5[0][0]']
2D)
normal_left4_5 (AveragePooling (None, 16, 16, 88) 0 ['adjust_bn_5[0][0]']
2D)
normal_right4_5 (AveragePoolin (None, 16, 16, 88) 0 ['adjust_bn_5[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left5_5
t5_5 (BatchNormalization) [0][0]']
normal_add_1_5 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
1_5[0][0]',
'separable_conv_2_bn_normal_righ
t1_5[0][0]']
normal_add_2_5 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
2_5[0][0]',
'separable_conv_2_bn_normal_righ
t2_5[0][0]']
normal_add_3_5 (Add) (None, 16, 16, 88) 0 ['normal_left3_5[0][0]',
'adjust_bn_5[0][0]']
normal_add_4_5 (Add) (None, 16, 16, 88) 0 ['normal_left4_5[0][0]',
'normal_right4_5[0][0]']
normal_add_5_5 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
5_5[0][0]',
'normal_bn_1_5[0][0]']
normal_concat_5 (Concatenate) (None, 16, 16, 528) 0 ['adjust_bn_5[0][0]',
'normal_add_1_5[0][0]',
'normal_add_2_5[0][0]',
'normal_add_3_5[0][0]',
'normal_add_4_5[0][0]',
'normal_add_5_5[0][0]']
activation_92 (Activation) (None, 16, 16, 352) 0 ['reduction_concat_reduce_4[0][0]
']
activation_93 (Activation) (None, 16, 16, 528) 0 ['normal_concat_5[0][0]']
adjust_conv_projection_6 (Conv (None, 16, 16, 88) 30976 ['activation_92[0][0]']
2D)
normal_conv_1_6 (Conv2D) (None, 16, 16, 88) 46464 ['activation_93[0][0]']
adjust_bn_6 (BatchNormalizatio (None, 16, 16, 88) 352 ['adjust_conv_projection_6[0][0]'
n) ]
normal_bn_1_6 (BatchNormalizat (None, 16, 16, 88) 352 ['normal_conv_1_6[0][0]']
ion)
activation_94 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_6[0][0]']
activation_96 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_6[0][0]']
activation_98 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_6[0][0]']
activation_100 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_6[0][0]']
activation_102 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_6[0][0]']
separable_conv_1_normal_left1_ (None, 16, 16, 88) 9944 ['activation_94[0][0]']
6 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 16, 16, 88) 8536 ['activation_96[0][0]']
_6 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 16, 16, 88) 9944 ['activation_98[0][0]']
6 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 16, 16, 88) 8536 ['activation_100[0][0]']
_6 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 16, 16, 88) 8536 ['activation_102[0][0]']
6 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left1_6
t1_6 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right1_
ht1_6 (BatchNormalization) 6[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left2_6
t2_6 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right2_
ht2_6 (BatchNormalization) 6[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left5_6
t5_6 (BatchNormalization) [0][0]']
activation_95 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
1_6[0][0]']
activation_97 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t1_6[0][0]']
activation_99 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
2_6[0][0]']
activation_101 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t2_6[0][0]']
activation_103 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
5_6[0][0]']
separable_conv_2_normal_left1_ (None, 16, 16, 88) 9944 ['activation_95[0][0]']
6 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 16, 16, 88) 8536 ['activation_97[0][0]']
_6 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 16, 16, 88) 9944 ['activation_99[0][0]']
6 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 16, 16, 88) 8536 ['activation_101[0][0]']
_6 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 16, 16, 88) 8536 ['activation_103[0][0]']
6 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left1_6
t1_6 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right1_
ht1_6 (BatchNormalization) 6[0][0]']
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left2_6
t2_6 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right2_
ht2_6 (BatchNormalization) 6[0][0]']
normal_left3_6 (AveragePooling (None, 16, 16, 88) 0 ['normal_bn_1_6[0][0]']
2D)
normal_left4_6 (AveragePooling (None, 16, 16, 88) 0 ['adjust_bn_6[0][0]']
2D)
normal_right4_6 (AveragePoolin (None, 16, 16, 88) 0 ['adjust_bn_6[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left5_6
t5_6 (BatchNormalization) [0][0]']
normal_add_1_6 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
1_6[0][0]',
'separable_conv_2_bn_normal_righ
t1_6[0][0]']
normal_add_2_6 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
2_6[0][0]',
'separable_conv_2_bn_normal_righ
t2_6[0][0]']
normal_add_3_6 (Add) (None, 16, 16, 88) 0 ['normal_left3_6[0][0]',
'adjust_bn_6[0][0]']
normal_add_4_6 (Add) (None, 16, 16, 88) 0 ['normal_left4_6[0][0]',
'normal_right4_6[0][0]']
normal_add_5_6 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
5_6[0][0]',
'normal_bn_1_6[0][0]']
normal_concat_6 (Concatenate) (None, 16, 16, 528) 0 ['adjust_bn_6[0][0]',
'normal_add_1_6[0][0]',
'normal_add_2_6[0][0]',
'normal_add_3_6[0][0]',
'normal_add_4_6[0][0]',
'normal_add_5_6[0][0]']
activation_104 (Activation) (None, 16, 16, 528) 0 ['normal_concat_5[0][0]']
activation_105 (Activation) (None, 16, 16, 528) 0 ['normal_concat_6[0][0]']
adjust_conv_projection_7 (Conv (None, 16, 16, 88) 46464 ['activation_104[0][0]']
2D)
normal_conv_1_7 (Conv2D) (None, 16, 16, 88) 46464 ['activation_105[0][0]']
adjust_bn_7 (BatchNormalizatio (None, 16, 16, 88) 352 ['adjust_conv_projection_7[0][0]'
n) ]
normal_bn_1_7 (BatchNormalizat (None, 16, 16, 88) 352 ['normal_conv_1_7[0][0]']
ion)
activation_106 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_7[0][0]']
activation_108 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_7[0][0]']
activation_110 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_7[0][0]']
activation_112 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_7[0][0]']
activation_114 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_7[0][0]']
separable_conv_1_normal_left1_ (None, 16, 16, 88) 9944 ['activation_106[0][0]']
7 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 16, 16, 88) 8536 ['activation_108[0][0]']
_7 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 16, 16, 88) 9944 ['activation_110[0][0]']
7 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 16, 16, 88) 8536 ['activation_112[0][0]']
_7 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 16, 16, 88) 8536 ['activation_114[0][0]']
7 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left1_7
t1_7 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right1_
ht1_7 (BatchNormalization) 7[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left2_7
t2_7 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right2_
ht2_7 (BatchNormalization) 7[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left5_7
t5_7 (BatchNormalization) [0][0]']
activation_107 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
1_7[0][0]']
activation_109 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t1_7[0][0]']
activation_111 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
2_7[0][0]']
activation_113 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t2_7[0][0]']
activation_115 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
5_7[0][0]']
separable_conv_2_normal_left1_ (None, 16, 16, 88) 9944 ['activation_107[0][0]']
7 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 16, 16, 88) 8536 ['activation_109[0][0]']
_7 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 16, 16, 88) 9944 ['activation_111[0][0]']
7 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 16, 16, 88) 8536 ['activation_113[0][0]']
_7 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 16, 16, 88) 8536 ['activation_115[0][0]']
7 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left1_7
t1_7 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right1_
ht1_7 (BatchNormalization) 7[0][0]']
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left2_7
t2_7 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right2_
ht2_7 (BatchNormalization) 7[0][0]']
normal_left3_7 (AveragePooling (None, 16, 16, 88) 0 ['normal_bn_1_7[0][0]']
2D)
normal_left4_7 (AveragePooling (None, 16, 16, 88) 0 ['adjust_bn_7[0][0]']
2D)
normal_right4_7 (AveragePoolin (None, 16, 16, 88) 0 ['adjust_bn_7[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left5_7
t5_7 (BatchNormalization) [0][0]']
normal_add_1_7 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
1_7[0][0]',
'separable_conv_2_bn_normal_righ
t1_7[0][0]']
normal_add_2_7 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
2_7[0][0]',
'separable_conv_2_bn_normal_righ
t2_7[0][0]']
normal_add_3_7 (Add) (None, 16, 16, 88) 0 ['normal_left3_7[0][0]',
'adjust_bn_7[0][0]']
normal_add_4_7 (Add) (None, 16, 16, 88) 0 ['normal_left4_7[0][0]',
'normal_right4_7[0][0]']
normal_add_5_7 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
5_7[0][0]',
'normal_bn_1_7[0][0]']
normal_concat_7 (Concatenate) (None, 16, 16, 528) 0 ['adjust_bn_7[0][0]',
'normal_add_1_7[0][0]',
'normal_add_2_7[0][0]',
'normal_add_3_7[0][0]',
'normal_add_4_7[0][0]',
'normal_add_5_7[0][0]']
activation_116 (Activation) (None, 16, 16, 528) 0 ['normal_concat_6[0][0]']
activation_117 (Activation) (None, 16, 16, 528) 0 ['normal_concat_7[0][0]']
adjust_conv_projection_8 (Conv (None, 16, 16, 88) 46464 ['activation_116[0][0]']
2D)
normal_conv_1_8 (Conv2D) (None, 16, 16, 88) 46464 ['activation_117[0][0]']
adjust_bn_8 (BatchNormalizatio (None, 16, 16, 88) 352 ['adjust_conv_projection_8[0][0]'
n) ]
normal_bn_1_8 (BatchNormalizat (None, 16, 16, 88) 352 ['normal_conv_1_8[0][0]']
ion)
activation_118 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_8[0][0]']
activation_120 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_8[0][0]']
activation_122 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_8[0][0]']
activation_124 (Activation) (None, 16, 16, 88) 0 ['adjust_bn_8[0][0]']
activation_126 (Activation) (None, 16, 16, 88) 0 ['normal_bn_1_8[0][0]']
separable_conv_1_normal_left1_ (None, 16, 16, 88) 9944 ['activation_118[0][0]']
8 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 16, 16, 88) 8536 ['activation_120[0][0]']
_8 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 16, 16, 88) 9944 ['activation_122[0][0]']
8 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 16, 16, 88) 8536 ['activation_124[0][0]']
_8 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 16, 16, 88) 8536 ['activation_126[0][0]']
8 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left1_8
t1_8 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right1_
ht1_8 (BatchNormalization) 8[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left2_8
t2_8 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_1_normal_right2_
ht2_8 (BatchNormalization) 8[0][0]']
separable_conv_1_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_1_normal_left5_8
t5_8 (BatchNormalization) [0][0]']
activation_119 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
1_8[0][0]']
activation_121 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t1_8[0][0]']
activation_123 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
2_8[0][0]']
activation_125 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_righ
t2_8[0][0]']
activation_127 (Activation) (None, 16, 16, 88) 0 ['separable_conv_1_bn_normal_left
5_8[0][0]']
separable_conv_2_normal_left1_ (None, 16, 16, 88) 9944 ['activation_119[0][0]']
8 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 16, 16, 88) 8536 ['activation_121[0][0]']
_8 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 16, 16, 88) 9944 ['activation_123[0][0]']
8 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 16, 16, 88) 8536 ['activation_125[0][0]']
_8 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 16, 16, 88) 8536 ['activation_127[0][0]']
8 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left1_8
t1_8 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right1_
ht1_8 (BatchNormalization) 8[0][0]']
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left2_8
t2_8 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 16, 16, 88) 352 ['separable_conv_2_normal_right2_
ht2_8 (BatchNormalization) 8[0][0]']
normal_left3_8 (AveragePooling (None, 16, 16, 88) 0 ['normal_bn_1_8[0][0]']
2D)
normal_left4_8 (AveragePooling (None, 16, 16, 88) 0 ['adjust_bn_8[0][0]']
2D)
normal_right4_8 (AveragePoolin (None, 16, 16, 88) 0 ['adjust_bn_8[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 16, 16, 88) 352 ['separable_conv_2_normal_left5_8
t5_8 (BatchNormalization) [0][0]']
normal_add_1_8 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
1_8[0][0]',
'separable_conv_2_bn_normal_righ
t1_8[0][0]']
normal_add_2_8 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
2_8[0][0]',
'separable_conv_2_bn_normal_righ
t2_8[0][0]']
normal_add_3_8 (Add) (None, 16, 16, 88) 0 ['normal_left3_8[0][0]',
'adjust_bn_8[0][0]']
normal_add_4_8 (Add) (None, 16, 16, 88) 0 ['normal_left4_8[0][0]',
'normal_right4_8[0][0]']
normal_add_5_8 (Add) (None, 16, 16, 88) 0 ['separable_conv_2_bn_normal_left
5_8[0][0]',
'normal_bn_1_8[0][0]']
normal_concat_8 (Concatenate) (None, 16, 16, 528) 0 ['adjust_bn_8[0][0]',
'normal_add_1_8[0][0]',
'normal_add_2_8[0][0]',
'normal_add_3_8[0][0]',
'normal_add_4_8[0][0]',
'normal_add_5_8[0][0]']
activation_129 (Activation) (None, 16, 16, 528) 0 ['normal_concat_8[0][0]']
activation_128 (Activation) (None, 16, 16, 528) 0 ['normal_concat_7[0][0]']
reduction_conv_1_reduce_8 (Con (None, 16, 16, 176) 92928 ['activation_129[0][0]']
v2D)
adjust_conv_projection_reduce_ (None, 16, 16, 176) 92928 ['activation_128[0][0]']
8 (Conv2D)
reduction_bn_1_reduce_8 (Batch (None, 16, 16, 176) 704 ['reduction_conv_1_reduce_8[0][0]
Normalization) ']
adjust_bn_reduce_8 (BatchNorma (None, 16, 16, 176) 704 ['adjust_conv_projection_reduce_8
lization) [0][0]']
activation_130 (Activation) (None, 16, 16, 176) 0 ['reduction_bn_1_reduce_8[0][0]']
activation_132 (Activation) (None, 16, 16, 176) 0 ['adjust_bn_reduce_8[0][0]']
separable_conv_1_pad_reduction (None, 19, 19, 176) 0 ['activation_130[0][0]']
_left1_reduce_8 (ZeroPadding2D
)
separable_conv_1_pad_reduction (None, 21, 21, 176) 0 ['activation_132[0][0]']
_right1_reduce_8 (ZeroPadding2
D)
separable_conv_1_reduction_lef (None, 8, 8, 176) 35376 ['separable_conv_1_pad_reduction_
t1_reduce_8 (SeparableConv2D) left1_reduce_8[0][0]']
separable_conv_1_reduction_rig (None, 8, 8, 176) 39600 ['separable_conv_1_pad_reduction_
ht1_reduce_8 (SeparableConv2D) right1_reduce_8[0][0]']
separable_conv_1_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_1_reduction_left
left1_reduce_8 (BatchNormaliza 1_reduce_8[0][0]']
tion)
separable_conv_1_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_1_reduction_righ
right1_reduce_8 (BatchNormaliz t1_reduce_8[0][0]']
ation)
activation_131 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_reduction_l
eft1_reduce_8[0][0]']
activation_133 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_reduction_r
ight1_reduce_8[0][0]']
separable_conv_2_reduction_lef (None, 8, 8, 176) 35376 ['activation_131[0][0]']
t1_reduce_8 (SeparableConv2D)
separable_conv_2_reduction_rig (None, 8, 8, 176) 39600 ['activation_133[0][0]']
ht1_reduce_8 (SeparableConv2D)
activation_134 (Activation) (None, 16, 16, 176) 0 ['adjust_bn_reduce_8[0][0]']
separable_conv_2_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_2_reduction_left
left1_reduce_8 (BatchNormaliza 1_reduce_8[0][0]']
tion)
separable_conv_2_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_2_reduction_righ
right1_reduce_8 (BatchNormaliz t1_reduce_8[0][0]']
ation)
separable_conv_1_pad_reduction (None, 21, 21, 176) 0 ['activation_134[0][0]']
_right2_reduce_8 (ZeroPadding2
D)
activation_136 (Activation) (None, 16, 16, 176) 0 ['adjust_bn_reduce_8[0][0]']
reduction_add_1_reduce_8 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_reduction_l
eft1_reduce_8[0][0]',
'separable_conv_2_bn_reduction_r
ight1_reduce_8[0][0]']
separable_conv_1_reduction_rig (None, 8, 8, 176) 39600 ['separable_conv_1_pad_reduction_
ht2_reduce_8 (SeparableConv2D) right2_reduce_8[0][0]']
separable_conv_1_pad_reduction (None, 19, 19, 176) 0 ['activation_136[0][0]']
_right3_reduce_8 (ZeroPadding2
D)
activation_138 (Activation) (None, 8, 8, 176) 0 ['reduction_add_1_reduce_8[0][0]'
]
separable_conv_1_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_1_reduction_righ
right2_reduce_8 (BatchNormaliz t2_reduce_8[0][0]']
ation)
separable_conv_1_reduction_rig (None, 8, 8, 176) 35376 ['separable_conv_1_pad_reduction_
ht3_reduce_8 (SeparableConv2D) right3_reduce_8[0][0]']
separable_conv_1_reduction_lef (None, 8, 8, 176) 32560 ['activation_138[0][0]']
t4_reduce_8 (SeparableConv2D)
activation_135 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_reduction_r
ight2_reduce_8[0][0]']
separable_conv_1_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_1_reduction_righ
right3_reduce_8 (BatchNormaliz t3_reduce_8[0][0]']
ation)
separable_conv_1_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_1_reduction_left
left4_reduce_8 (BatchNormaliza 4_reduce_8[0][0]']
tion)
reduction_pad_1_reduce_8 (Zero (None, 17, 17, 176) 0 ['reduction_bn_1_reduce_8[0][0]']
Padding2D)
separable_conv_2_reduction_rig (None, 8, 8, 176) 39600 ['activation_135[0][0]']
ht2_reduce_8 (SeparableConv2D)
activation_137 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_reduction_r
ight3_reduce_8[0][0]']
activation_139 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_reduction_l
eft4_reduce_8[0][0]']
reduction_left2_reduce_8 (MaxP (None, 8, 8, 176) 0 ['reduction_pad_1_reduce_8[0][0]'
ooling2D) ]
separable_conv_2_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_2_reduction_righ
right2_reduce_8 (BatchNormaliz t2_reduce_8[0][0]']
ation)
separable_conv_2_reduction_rig (None, 8, 8, 176) 35376 ['activation_137[0][0]']
ht3_reduce_8 (SeparableConv2D)
separable_conv_2_reduction_lef (None, 8, 8, 176) 32560 ['activation_139[0][0]']
t4_reduce_8 (SeparableConv2D)
adjust_relu_1_9 (Activation) (None, 16, 16, 528) 0 ['normal_concat_8[0][0]']
reduction_add_2_reduce_8 (Add) (None, 8, 8, 176) 0 ['reduction_left2_reduce_8[0][0]'
, 'separable_conv_2_bn_reduction_
right2_reduce_8[0][0]']
reduction_left3_reduce_8 (Aver (None, 8, 8, 176) 0 ['reduction_pad_1_reduce_8[0][0]'
agePooling2D) ]
separable_conv_2_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_2_reduction_righ
right3_reduce_8 (BatchNormaliz t3_reduce_8[0][0]']
ation)
reduction_left4_reduce_8 (Aver (None, 8, 8, 176) 0 ['reduction_add_1_reduce_8[0][0]'
agePooling2D) ]
separable_conv_2_bn_reduction_ (None, 8, 8, 176) 704 ['separable_conv_2_reduction_left
left4_reduce_8 (BatchNormaliza 4_reduce_8[0][0]']
tion)
reduction_right5_reduce_8 (Max (None, 8, 8, 176) 0 ['reduction_pad_1_reduce_8[0][0]'
Pooling2D) ]
zero_padding2d_3 (ZeroPadding2 (None, 17, 17, 528) 0 ['adjust_relu_1_9[0][0]']
D)
reduction_add3_reduce_8 (Add) (None, 8, 8, 176) 0 ['reduction_left3_reduce_8[0][0]'
, 'separable_conv_2_bn_reduction_
right3_reduce_8[0][0]']
add_27 (Add) (None, 8, 8, 176) 0 ['reduction_add_2_reduce_8[0][0]'
, 'reduction_left4_reduce_8[0][0]
']
reduction_add4_reduce_8 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_reduction_l
eft4_reduce_8[0][0]',
'reduction_right5_reduce_8[0][0]
']
cropping2d_3 (Cropping2D) (None, 16, 16, 528) 0 ['zero_padding2d_3[0][0]']
reduction_concat_reduce_8 (Con (None, 8, 8, 704) 0 ['reduction_add_2_reduce_8[0][0]'
catenate) , 'reduction_add3_reduce_8[0][0]'
, 'add_27[0][0]',
'reduction_add4_reduce_8[0][0]']
adjust_avg_pool_1_9 (AveragePo (None, 8, 8, 528) 0 ['adjust_relu_1_9[0][0]']
oling2D)
adjust_avg_pool_2_9 (AveragePo (None, 8, 8, 528) 0 ['cropping2d_3[0][0]']
oling2D)
adjust_conv_1_9 (Conv2D) (None, 8, 8, 88) 46464 ['adjust_avg_pool_1_9[0][0]']
adjust_conv_2_9 (Conv2D) (None, 8, 8, 88) 46464 ['adjust_avg_pool_2_9[0][0]']
activation_140 (Activation) (None, 8, 8, 704) 0 ['reduction_concat_reduce_8[0][0]
']
concatenate_3 (Concatenate) (None, 8, 8, 176) 0 ['adjust_conv_1_9[0][0]',
'adjust_conv_2_9[0][0]']
normal_conv_1_9 (Conv2D) (None, 8, 8, 176) 123904 ['activation_140[0][0]']
adjust_bn_9 (BatchNormalizatio (None, 8, 8, 176) 704 ['concatenate_3[0][0]']
n)
normal_bn_1_9 (BatchNormalizat (None, 8, 8, 176) 704 ['normal_conv_1_9[0][0]']
ion)
activation_141 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_9[0][0]']
activation_143 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_9[0][0]']
activation_145 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_9[0][0]']
activation_147 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_9[0][0]']
activation_149 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_9[0][0]']
separable_conv_1_normal_left1_ (None, 8, 8, 176) 35376 ['activation_141[0][0]']
9 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 8, 8, 176) 32560 ['activation_143[0][0]']
_9 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 8, 8, 176) 35376 ['activation_145[0][0]']
9 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 8, 8, 176) 32560 ['activation_147[0][0]']
_9 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 8, 8, 176) 32560 ['activation_149[0][0]']
9 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left1_9
t1_9 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right1_
ht1_9 (BatchNormalization) 9[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left2_9
t2_9 (BatchNormalization) [0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right2_
ht2_9 (BatchNormalization) 9[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left5_9
t5_9 (BatchNormalization) [0][0]']
activation_142 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
1_9[0][0]']
activation_144 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t1_9[0][0]']
activation_146 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
2_9[0][0]']
activation_148 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t2_9[0][0]']
activation_150 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
5_9[0][0]']
separable_conv_2_normal_left1_ (None, 8, 8, 176) 35376 ['activation_142[0][0]']
9 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 8, 8, 176) 32560 ['activation_144[0][0]']
_9 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 8, 8, 176) 35376 ['activation_146[0][0]']
9 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 8, 8, 176) 32560 ['activation_148[0][0]']
_9 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 8, 8, 176) 32560 ['activation_150[0][0]']
9 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left1_9
t1_9 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right1_
ht1_9 (BatchNormalization) 9[0][0]']
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left2_9
t2_9 (BatchNormalization) [0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right2_
ht2_9 (BatchNormalization) 9[0][0]']
normal_left3_9 (AveragePooling (None, 8, 8, 176) 0 ['normal_bn_1_9[0][0]']
2D)
normal_left4_9 (AveragePooling (None, 8, 8, 176) 0 ['adjust_bn_9[0][0]']
2D)
normal_right4_9 (AveragePoolin (None, 8, 8, 176) 0 ['adjust_bn_9[0][0]']
g2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left5_9
t5_9 (BatchNormalization) [0][0]']
normal_add_1_9 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
1_9[0][0]',
'separable_conv_2_bn_normal_righ
t1_9[0][0]']
normal_add_2_9 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
2_9[0][0]',
'separable_conv_2_bn_normal_righ
t2_9[0][0]']
normal_add_3_9 (Add) (None, 8, 8, 176) 0 ['normal_left3_9[0][0]',
'adjust_bn_9[0][0]']
normal_add_4_9 (Add) (None, 8, 8, 176) 0 ['normal_left4_9[0][0]',
'normal_right4_9[0][0]']
normal_add_5_9 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
5_9[0][0]',
'normal_bn_1_9[0][0]']
normal_concat_9 (Concatenate) (None, 8, 8, 1056) 0 ['adjust_bn_9[0][0]',
'normal_add_1_9[0][0]',
'normal_add_2_9[0][0]',
'normal_add_3_9[0][0]',
'normal_add_4_9[0][0]',
'normal_add_5_9[0][0]']
activation_151 (Activation) (None, 8, 8, 704) 0 ['reduction_concat_reduce_8[0][0]
']
activation_152 (Activation) (None, 8, 8, 1056) 0 ['normal_concat_9[0][0]']
adjust_conv_projection_10 (Con (None, 8, 8, 176) 123904 ['activation_151[0][0]']
v2D)
normal_conv_1_10 (Conv2D) (None, 8, 8, 176) 185856 ['activation_152[0][0]']
adjust_bn_10 (BatchNormalizati (None, 8, 8, 176) 704 ['adjust_conv_projection_10[0][0]
on) ']
normal_bn_1_10 (BatchNormaliza (None, 8, 8, 176) 704 ['normal_conv_1_10[0][0]']
tion)
activation_153 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_10[0][0]']
activation_155 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_10[0][0]']
activation_157 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_10[0][0]']
activation_159 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_10[0][0]']
activation_161 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_10[0][0]']
separable_conv_1_normal_left1_ (None, 8, 8, 176) 35376 ['activation_153[0][0]']
10 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 8, 8, 176) 32560 ['activation_155[0][0]']
_10 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 8, 8, 176) 35376 ['activation_157[0][0]']
10 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 8, 8, 176) 32560 ['activation_159[0][0]']
_10 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 8, 8, 176) 32560 ['activation_161[0][0]']
10 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left1_1
t1_10 (BatchNormalization) 0[0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right1_
ht1_10 (BatchNormalization) 10[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left2_1
t2_10 (BatchNormalization) 0[0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right2_
ht2_10 (BatchNormalization) 10[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left5_1
t5_10 (BatchNormalization) 0[0][0]']
activation_154 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
1_10[0][0]']
activation_156 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t1_10[0][0]']
activation_158 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
2_10[0][0]']
activation_160 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t2_10[0][0]']
activation_162 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
5_10[0][0]']
separable_conv_2_normal_left1_ (None, 8, 8, 176) 35376 ['activation_154[0][0]']
10 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 8, 8, 176) 32560 ['activation_156[0][0]']
_10 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 8, 8, 176) 35376 ['activation_158[0][0]']
10 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 8, 8, 176) 32560 ['activation_160[0][0]']
_10 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 8, 8, 176) 32560 ['activation_162[0][0]']
10 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left1_1
t1_10 (BatchNormalization) 0[0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right1_
ht1_10 (BatchNormalization) 10[0][0]']
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left2_1
t2_10 (BatchNormalization) 0[0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right2_
ht2_10 (BatchNormalization) 10[0][0]']
normal_left3_10 (AveragePoolin (None, 8, 8, 176) 0 ['normal_bn_1_10[0][0]']
g2D)
normal_left4_10 (AveragePoolin (None, 8, 8, 176) 0 ['adjust_bn_10[0][0]']
g2D)
normal_right4_10 (AveragePooli (None, 8, 8, 176) 0 ['adjust_bn_10[0][0]']
ng2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left5_1
t5_10 (BatchNormalization) 0[0][0]']
normal_add_1_10 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
1_10[0][0]',
'separable_conv_2_bn_normal_righ
t1_10[0][0]']
normal_add_2_10 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
2_10[0][0]',
'separable_conv_2_bn_normal_righ
t2_10[0][0]']
normal_add_3_10 (Add) (None, 8, 8, 176) 0 ['normal_left3_10[0][0]',
'adjust_bn_10[0][0]']
normal_add_4_10 (Add) (None, 8, 8, 176) 0 ['normal_left4_10[0][0]',
'normal_right4_10[0][0]']
normal_add_5_10 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
5_10[0][0]',
'normal_bn_1_10[0][0]']
normal_concat_10 (Concatenate) (None, 8, 8, 1056) 0 ['adjust_bn_10[0][0]',
'normal_add_1_10[0][0]',
'normal_add_2_10[0][0]',
'normal_add_3_10[0][0]',
'normal_add_4_10[0][0]',
'normal_add_5_10[0][0]']
activation_163 (Activation) (None, 8, 8, 1056) 0 ['normal_concat_9[0][0]']
activation_164 (Activation) (None, 8, 8, 1056) 0 ['normal_concat_10[0][0]']
adjust_conv_projection_11 (Con (None, 8, 8, 176) 185856 ['activation_163[0][0]']
v2D)
normal_conv_1_11 (Conv2D) (None, 8, 8, 176) 185856 ['activation_164[0][0]']
adjust_bn_11 (BatchNormalizati (None, 8, 8, 176) 704 ['adjust_conv_projection_11[0][0]
on) ']
normal_bn_1_11 (BatchNormaliza (None, 8, 8, 176) 704 ['normal_conv_1_11[0][0]']
tion)
activation_165 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_11[0][0]']
activation_167 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_11[0][0]']
activation_169 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_11[0][0]']
activation_171 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_11[0][0]']
activation_173 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_11[0][0]']
separable_conv_1_normal_left1_ (None, 8, 8, 176) 35376 ['activation_165[0][0]']
11 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 8, 8, 176) 32560 ['activation_167[0][0]']
_11 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 8, 8, 176) 35376 ['activation_169[0][0]']
11 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 8, 8, 176) 32560 ['activation_171[0][0]']
_11 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 8, 8, 176) 32560 ['activation_173[0][0]']
11 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left1_1
t1_11 (BatchNormalization) 1[0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right1_
ht1_11 (BatchNormalization) 11[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left2_1
t2_11 (BatchNormalization) 1[0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right2_
ht2_11 (BatchNormalization) 11[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left5_1
t5_11 (BatchNormalization) 1[0][0]']
activation_166 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
1_11[0][0]']
activation_168 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t1_11[0][0]']
activation_170 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
2_11[0][0]']
activation_172 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t2_11[0][0]']
activation_174 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
5_11[0][0]']
separable_conv_2_normal_left1_ (None, 8, 8, 176) 35376 ['activation_166[0][0]']
11 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 8, 8, 176) 32560 ['activation_168[0][0]']
_11 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 8, 8, 176) 35376 ['activation_170[0][0]']
11 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 8, 8, 176) 32560 ['activation_172[0][0]']
_11 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 8, 8, 176) 32560 ['activation_174[0][0]']
11 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left1_1
t1_11 (BatchNormalization) 1[0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right1_
ht1_11 (BatchNormalization) 11[0][0]']
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left2_1
t2_11 (BatchNormalization) 1[0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right2_
ht2_11 (BatchNormalization) 11[0][0]']
normal_left3_11 (AveragePoolin (None, 8, 8, 176) 0 ['normal_bn_1_11[0][0]']
g2D)
normal_left4_11 (AveragePoolin (None, 8, 8, 176) 0 ['adjust_bn_11[0][0]']
g2D)
normal_right4_11 (AveragePooli (None, 8, 8, 176) 0 ['adjust_bn_11[0][0]']
ng2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left5_1
t5_11 (BatchNormalization) 1[0][0]']
normal_add_1_11 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
1_11[0][0]',
'separable_conv_2_bn_normal_righ
t1_11[0][0]']
normal_add_2_11 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
2_11[0][0]',
'separable_conv_2_bn_normal_righ
t2_11[0][0]']
normal_add_3_11 (Add) (None, 8, 8, 176) 0 ['normal_left3_11[0][0]',
'adjust_bn_11[0][0]']
normal_add_4_11 (Add) (None, 8, 8, 176) 0 ['normal_left4_11[0][0]',
'normal_right4_11[0][0]']
normal_add_5_11 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
5_11[0][0]',
'normal_bn_1_11[0][0]']
normal_concat_11 (Concatenate) (None, 8, 8, 1056) 0 ['adjust_bn_11[0][0]',
'normal_add_1_11[0][0]',
'normal_add_2_11[0][0]',
'normal_add_3_11[0][0]',
'normal_add_4_11[0][0]',
'normal_add_5_11[0][0]']
activation_175 (Activation) (None, 8, 8, 1056) 0 ['normal_concat_10[0][0]']
activation_176 (Activation) (None, 8, 8, 1056) 0 ['normal_concat_11[0][0]']
adjust_conv_projection_12 (Con (None, 8, 8, 176) 185856 ['activation_175[0][0]']
v2D)
normal_conv_1_12 (Conv2D) (None, 8, 8, 176) 185856 ['activation_176[0][0]']
adjust_bn_12 (BatchNormalizati (None, 8, 8, 176) 704 ['adjust_conv_projection_12[0][0]
on) ']
normal_bn_1_12 (BatchNormaliza (None, 8, 8, 176) 704 ['normal_conv_1_12[0][0]']
tion)
activation_177 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_12[0][0]']
activation_179 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_12[0][0]']
activation_181 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_12[0][0]']
activation_183 (Activation) (None, 8, 8, 176) 0 ['adjust_bn_12[0][0]']
activation_185 (Activation) (None, 8, 8, 176) 0 ['normal_bn_1_12[0][0]']
separable_conv_1_normal_left1_ (None, 8, 8, 176) 35376 ['activation_177[0][0]']
12 (SeparableConv2D)
separable_conv_1_normal_right1 (None, 8, 8, 176) 32560 ['activation_179[0][0]']
_12 (SeparableConv2D)
separable_conv_1_normal_left2_ (None, 8, 8, 176) 35376 ['activation_181[0][0]']
12 (SeparableConv2D)
separable_conv_1_normal_right2 (None, 8, 8, 176) 32560 ['activation_183[0][0]']
_12 (SeparableConv2D)
separable_conv_1_normal_left5_ (None, 8, 8, 176) 32560 ['activation_185[0][0]']
12 (SeparableConv2D)
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left1_1
t1_12 (BatchNormalization) 2[0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right1_
ht1_12 (BatchNormalization) 12[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left2_1
t2_12 (BatchNormalization) 2[0][0]']
separable_conv_1_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_1_normal_right2_
ht2_12 (BatchNormalization) 12[0][0]']
separable_conv_1_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_1_normal_left5_1
t5_12 (BatchNormalization) 2[0][0]']
activation_178 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
1_12[0][0]']
activation_180 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t1_12[0][0]']
activation_182 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
2_12[0][0]']
activation_184 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_righ
t2_12[0][0]']
activation_186 (Activation) (None, 8, 8, 176) 0 ['separable_conv_1_bn_normal_left
5_12[0][0]']
separable_conv_2_normal_left1_ (None, 8, 8, 176) 35376 ['activation_178[0][0]']
12 (SeparableConv2D)
separable_conv_2_normal_right1 (None, 8, 8, 176) 32560 ['activation_180[0][0]']
_12 (SeparableConv2D)
separable_conv_2_normal_left2_ (None, 8, 8, 176) 35376 ['activation_182[0][0]']
12 (SeparableConv2D)
separable_conv_2_normal_right2 (None, 8, 8, 176) 32560 ['activation_184[0][0]']
_12 (SeparableConv2D)
separable_conv_2_normal_left5_ (None, 8, 8, 176) 32560 ['activation_186[0][0]']
12 (SeparableConv2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left1_1
t1_12 (BatchNormalization) 2[0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right1_
ht1_12 (BatchNormalization) 12[0][0]']
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left2_1
t2_12 (BatchNormalization) 2[0][0]']
separable_conv_2_bn_normal_rig (None, 8, 8, 176) 704 ['separable_conv_2_normal_right2_
ht2_12 (BatchNormalization) 12[0][0]']
normal_left3_12 (AveragePoolin (None, 8, 8, 176) 0 ['normal_bn_1_12[0][0]']
g2D)
normal_left4_12 (AveragePoolin (None, 8, 8, 176) 0 ['adjust_bn_12[0][0]']
g2D)
normal_right4_12 (AveragePooli (None, 8, 8, 176) 0 ['adjust_bn_12[0][0]']
ng2D)
separable_conv_2_bn_normal_lef (None, 8, 8, 176) 704 ['separable_conv_2_normal_left5_1
t5_12 (BatchNormalization) 2[0][0]']
normal_add_1_12 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
1_12[0][0]',
'separable_conv_2_bn_normal_righ
t1_12[0][0]']
normal_add_2_12 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
2_12[0][0]',
'separable_conv_2_bn_normal_righ
t2_12[0][0]']
normal_add_3_12 (Add) (None, 8, 8, 176) 0 ['normal_left3_12[0][0]',
'adjust_bn_12[0][0]']
normal_add_4_12 (Add) (None, 8, 8, 176) 0 ['normal_left4_12[0][0]',
'normal_right4_12[0][0]']
normal_add_5_12 (Add) (None, 8, 8, 176) 0 ['separable_conv_2_bn_normal_left
5_12[0][0]',
'normal_bn_1_12[0][0]']
normal_concat_12 (Concatenate) (None, 8, 8, 1056) 0 ['adjust_bn_12[0][0]',
'normal_add_1_12[0][0]',
'normal_add_2_12[0][0]',
'normal_add_3_12[0][0]',
'normal_add_4_12[0][0]',
'normal_add_5_12[0][0]']
activation_187 (Activation) (None, 8, 8, 1056) 0 ['normal_concat_12[0][0]']
global_average_pooling2d_1 (Gl (None, 1056) 0 ['activation_187[0][0]']
obalAveragePooling2D)
dense_8 (Dense) (None, 6) 6342 ['global_average_pooling2d_1[0][0
]']
==================================================================================================
Total params: 4,276,058
Trainable params: 4,239,320
Non-trainable params: 36,738
__________________________________________________________________________________________________
# Train the NASNetMobile-based model6 for 50 epochs, validating on test_set
# each epoch; `train_set`/`test_set` are the ImageDataGenerator iterators
# built earlier in the notebook.
hist6 = model6.fit(train_set, validation_data=test_set, epochs=50, steps_per_epoch=len(train_set), validation_steps=len(test_set))
Epoch 1/50 850/850 [==============================] - 145s 154ms/step - loss: 0.3357 - accuracy: 0.8764 - f1_m: 0.8531 - precision_m: 0.8794 - recall_m: 0.8400 - val_loss: 0.4962 - val_accuracy: 0.8373 - val_f1_m: 0.7668 - val_precision_m: 0.8722 - val_recall_m: 0.7142 Epoch 2/50 850/850 [==============================] - 125s 147ms/step - loss: 0.0966 - accuracy: 0.9606 - f1_m: 0.9602 - precision_m: 0.9618 - recall_m: 0.9594 - val_loss: 0.1794 - val_accuracy: 0.9410 - val_f1_m: 0.9387 - val_precision_m: 0.9462 - val_recall_m: 0.9349 Epoch 3/50 850/850 [==============================] - 122s 144ms/step - loss: 0.0720 - accuracy: 0.9770 - f1_m: 0.9771 - precision_m: 0.9771 - recall_m: 0.9771 - val_loss: 0.1893 - val_accuracy: 0.9519 - val_f1_m: 0.9498 - val_precision_m: 0.9561 - val_recall_m: 0.9467 Epoch 4/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0594 - accuracy: 0.9800 - f1_m: 0.9798 - precision_m: 0.9818 - recall_m: 0.9788 - val_loss: 0.4445 - val_accuracy: 0.7953 - val_f1_m: 0.8047 - val_precision_m: 0.8679 - val_recall_m: 0.7731 Epoch 5/50 850/850 [==============================] - 126s 148ms/step - loss: 0.0530 - accuracy: 0.9835 - f1_m: 0.9833 - precision_m: 0.9841 - recall_m: 0.9829 - val_loss: 0.3762 - val_accuracy: 0.8580 - val_f1_m: 0.8348 - val_precision_m: 0.9156 - val_recall_m: 0.7943 Epoch 6/50 850/850 [==============================] - 126s 149ms/step - loss: 0.0769 - accuracy: 0.9688 - f1_m: 0.9684 - precision_m: 0.9688 - recall_m: 0.9682 - val_loss: 0.4662 - val_accuracy: 0.7873 - val_f1_m: 0.8204 - val_precision_m: 0.9104 - val_recall_m: 0.7755 Epoch 7/50 850/850 [==============================] - 124s 145ms/step - loss: 0.0430 - accuracy: 0.9853 - f1_m: 0.9853 - precision_m: 0.9853 - recall_m: 0.9853 - val_loss: 0.6122 - val_accuracy: 0.7745 - val_f1_m: 0.7959 - val_precision_m: 0.8396 - val_recall_m: 0.7741 Epoch 8/50 850/850 [==============================] - 126s 148ms/step - loss: 0.0427 - accuracy: 0.9841 - 
f1_m: 0.9841 - precision_m: 0.9841 - recall_m: 0.9841 - val_loss: 0.5952 - val_accuracy: 0.7901 - val_f1_m: 0.7932 - val_precision_m: 0.8156 - val_recall_m: 0.7821 Epoch 9/50 850/850 [==============================] - 125s 147ms/step - loss: 0.0420 - accuracy: 0.9841 - f1_m: 0.9841 - precision_m: 0.9841 - recall_m: 0.9841 - val_loss: 0.6571 - val_accuracy: 0.7717 - val_f1_m: 0.7715 - val_precision_m: 0.7825 - val_recall_m: 0.7660 Epoch 10/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0361 - accuracy: 0.9871 - f1_m: 0.9871 - precision_m: 0.9871 - recall_m: 0.9871 - val_loss: 0.5860 - val_accuracy: 0.7873 - val_f1_m: 0.7926 - val_precision_m: 0.8090 - val_recall_m: 0.7844 Epoch 11/50 850/850 [==============================] - 122s 144ms/step - loss: 0.0342 - accuracy: 0.9865 - f1_m: 0.9863 - precision_m: 0.9871 - recall_m: 0.9859 - val_loss: 0.8704 - val_accuracy: 0.7830 - val_f1_m: 0.7866 - val_precision_m: 0.7948 - val_recall_m: 0.7825 Epoch 12/50 850/850 [==============================] - 123s 144ms/step - loss: 0.0428 - accuracy: 0.9823 - f1_m: 0.9824 - precision_m: 0.9824 - recall_m: 0.9824 - val_loss: 0.9797 - val_accuracy: 0.7995 - val_f1_m: 0.8038 - val_precision_m: 0.8160 - val_recall_m: 0.7976 Epoch 13/50 850/850 [==============================] - 126s 148ms/step - loss: 0.0353 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 1.0755 - val_accuracy: 0.7844 - val_f1_m: 0.7881 - val_precision_m: 0.7962 - val_recall_m: 0.7840 Epoch 14/50 850/850 [==============================] - 123s 144ms/step - loss: 0.0360 - accuracy: 0.9841 - f1_m: 0.9841 - precision_m: 0.9841 - recall_m: 0.9841 - val_loss: 1.5988 - val_accuracy: 0.7863 - val_f1_m: 0.7879 - val_precision_m: 0.7948 - val_recall_m: 0.7844 Epoch 15/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0369 - accuracy: 0.9847 - f1_m: 0.9843 - precision_m: 0.9847 - recall_m: 0.9841 - val_loss: 1.9501 - val_accuracy: 0.7816 - 
val_f1_m: 0.7829 - val_precision_m: 0.7901 - val_recall_m: 0.7792 Epoch 16/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0301 - accuracy: 0.9853 - f1_m: 0.9853 - precision_m: 0.9853 - recall_m: 0.9853 - val_loss: 1.5132 - val_accuracy: 0.7991 - val_f1_m: 0.8030 - val_precision_m: 0.8137 - val_recall_m: 0.7976 Epoch 17/50 850/850 [==============================] - 126s 148ms/step - loss: 0.0278 - accuracy: 0.9853 - f1_m: 0.9853 - precision_m: 0.9853 - recall_m: 0.9853 - val_loss: 2.1301 - val_accuracy: 0.7792 - val_f1_m: 0.7810 - val_precision_m: 0.7863 - val_recall_m: 0.7783 Epoch 18/50 850/850 [==============================] - 123s 145ms/step - loss: 0.0326 - accuracy: 0.9859 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 - val_loss: 1.9419 - val_accuracy: 0.7811 - val_f1_m: 0.7844 - val_precision_m: 0.7929 - val_recall_m: 0.7802 Epoch 19/50 850/850 [==============================] - 123s 145ms/step - loss: 0.0284 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 - val_loss: 2.3023 - val_accuracy: 0.7745 - val_f1_m: 0.7770 - val_precision_m: 0.7849 - val_recall_m: 0.7731 Epoch 20/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0296 - accuracy: 0.9835 - f1_m: 0.9835 - precision_m: 0.9835 - recall_m: 0.9835 - val_loss: 2.9813 - val_accuracy: 0.7750 - val_f1_m: 0.7758 - val_precision_m: 0.7783 - val_recall_m: 0.7745 Epoch 21/50 850/850 [==============================] - 123s 145ms/step - loss: 0.0252 - accuracy: 0.9853 - f1_m: 0.9853 - precision_m: 0.9853 - recall_m: 0.9853 - val_loss: 3.4731 - val_accuracy: 0.7717 - val_f1_m: 0.7722 - val_precision_m: 0.7741 - val_recall_m: 0.7712 Epoch 22/50 850/850 [==============================] - 123s 144ms/step - loss: 0.0319 - accuracy: 0.9829 - f1_m: 0.9825 - precision_m: 0.9829 - recall_m: 0.9824 - val_loss: 1.6217 - val_accuracy: 0.8170 - val_f1_m: 0.8231 - val_precision_m: 0.8363 - val_recall_m: 0.8165 Epoch 23/50 850/850 
[==============================] - 123s 144ms/step - loss: 0.0342 - accuracy: 0.9835 - f1_m: 0.9835 - precision_m: 0.9835 - recall_m: 0.9835 - val_loss: 2.2911 - val_accuracy: 0.8028 - val_f1_m: 0.8027 - val_precision_m: 0.8052 - val_recall_m: 0.8014 Epoch 24/50 850/850 [==============================] - 125s 147ms/step - loss: 0.0248 - accuracy: 0.9894 - f1_m: 0.9892 - precision_m: 0.9900 - recall_m: 0.9888 - val_loss: 1.2010 - val_accuracy: 0.8462 - val_f1_m: 0.8475 - val_precision_m: 0.8509 - val_recall_m: 0.8458 Epoch 25/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0239 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 0.8946 - val_accuracy: 0.8679 - val_f1_m: 0.8686 - val_precision_m: 0.8708 - val_recall_m: 0.8675 Epoch 26/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0237 - accuracy: 0.9847 - f1_m: 0.9847 - precision_m: 0.9847 - recall_m: 0.9847 - val_loss: 2.4620 - val_accuracy: 0.8118 - val_f1_m: 0.8123 - val_precision_m: 0.8151 - val_recall_m: 0.8108 Epoch 27/50 850/850 [==============================] - 133s 157ms/step - loss: 0.0250 - accuracy: 0.9853 - f1_m: 0.9853 - precision_m: 0.9853 - recall_m: 0.9853 - val_loss: 1.4220 - val_accuracy: 0.8594 - val_f1_m: 0.8597 - val_precision_m: 0.8623 - val_recall_m: 0.8585 Epoch 28/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0223 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 0.4710 - val_accuracy: 0.9151 - val_f1_m: 0.9162 - val_precision_m: 0.9184 - val_recall_m: 0.9151 Epoch 29/50 850/850 [==============================] - 123s 145ms/step - loss: 0.0509 - accuracy: 0.9741 - f1_m: 0.9739 - precision_m: 0.9747 - recall_m: 0.9735 - val_loss: 3.3543 - val_accuracy: 0.7665 - val_f1_m: 0.7667 - val_precision_m: 0.7670 - val_recall_m: 0.7665 Epoch 30/50 850/850 [==============================] - 125s 148ms/step - loss: 0.0272 - accuracy: 0.9841 - f1_m: 0.9839 - 
precision_m: 0.9847 - recall_m: 0.9835 - val_loss: 3.4221 - val_accuracy: 0.7679 - val_f1_m: 0.7668 - val_precision_m: 0.7693 - val_recall_m: 0.7656 Epoch 31/50 850/850 [==============================] - 127s 149ms/step - loss: 0.0250 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 3.2722 - val_accuracy: 0.7486 - val_f1_m: 0.7476 - val_precision_m: 0.7486 - val_recall_m: 0.7472 Epoch 32/50 850/850 [==============================] - 126s 149ms/step - loss: 0.0288 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 3.8468 - val_accuracy: 0.7741 - val_f1_m: 0.7737 - val_precision_m: 0.7750 - val_recall_m: 0.7731 Epoch 33/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0257 - accuracy: 0.9859 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 - val_loss: 2.7287 - val_accuracy: 0.7769 - val_f1_m: 0.7780 - val_precision_m: 0.7802 - val_recall_m: 0.7769 Epoch 34/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0225 - accuracy: 0.9853 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 - val_loss: 4.1407 - val_accuracy: 0.7854 - val_f1_m: 0.7852 - val_precision_m: 0.7868 - val_recall_m: 0.7844 Epoch 35/50 850/850 [==============================] - 123s 145ms/step - loss: 0.0261 - accuracy: 0.9871 - f1_m: 0.9859 - precision_m: 0.9871 - recall_m: 0.9853 - val_loss: 3.1000 - val_accuracy: 0.7830 - val_f1_m: 0.7835 - val_precision_m: 0.7844 - val_recall_m: 0.7830 Epoch 36/50 850/850 [==============================] - 125s 147ms/step - loss: 0.0198 - accuracy: 0.9894 - f1_m: 0.9894 - precision_m: 0.9894 - recall_m: 0.9894 - val_loss: 2.7607 - val_accuracy: 0.7948 - val_f1_m: 0.7947 - val_precision_m: 0.7953 - val_recall_m: 0.7943 Epoch 37/50 850/850 [==============================] - 125s 147ms/step - loss: 0.0255 - accuracy: 0.9847 - f1_m: 0.9845 - precision_m: 0.9853 - recall_m: 0.9841 - val_loss: 2.5120 - val_accuracy: 0.7792 - val_f1_m: 0.7802 - 
val_precision_m: 0.7821 - val_recall_m: 0.7792 Epoch 38/50 850/850 [==============================] - 130s 153ms/step - loss: 0.0212 - accuracy: 0.9847 - f1_m: 0.9847 - precision_m: 0.9847 - recall_m: 0.9847 - val_loss: 2.4266 - val_accuracy: 0.7774 - val_f1_m: 0.7810 - val_precision_m: 0.7901 - val_recall_m: 0.7764 Epoch 39/50 850/850 [==============================] - 125s 147ms/step - loss: 0.0223 - accuracy: 0.9865 - f1_m: 0.9863 - precision_m: 0.9871 - recall_m: 0.9859 - val_loss: 2.2071 - val_accuracy: 0.7821 - val_f1_m: 0.7821 - val_precision_m: 0.7821 - val_recall_m: 0.7821 Epoch 40/50 850/850 [==============================] - 127s 150ms/step - loss: 0.0235 - accuracy: 0.9859 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 1.8703 - val_accuracy: 0.8245 - val_f1_m: 0.8242 - val_precision_m: 0.8245 - val_recall_m: 0.8241 Epoch 41/50 850/850 [==============================] - 123s 145ms/step - loss: 0.0202 - accuracy: 0.9900 - f1_m: 0.9900 - precision_m: 0.9900 - recall_m: 0.9900 - val_loss: 2.2432 - val_accuracy: 0.8245 - val_f1_m: 0.8241 - val_precision_m: 0.8250 - val_recall_m: 0.8236 Epoch 42/50 850/850 [==============================] - 122s 144ms/step - loss: 0.0200 - accuracy: 0.9888 - f1_m: 0.9888 - precision_m: 0.9888 - recall_m: 0.9888 - val_loss: 2.8378 - val_accuracy: 0.8075 - val_f1_m: 0.8075 - val_precision_m: 0.8075 - val_recall_m: 0.8075 Epoch 43/50 850/850 [==============================] - 124s 146ms/step - loss: 0.0211 - accuracy: 0.9859 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 1.6155 - val_accuracy: 0.8316 - val_f1_m: 0.8314 - val_precision_m: 0.8321 - val_recall_m: 0.8311 Epoch 44/50 850/850 [==============================] - 128s 150ms/step - loss: 0.0234 - accuracy: 0.9871 - f1_m: 0.9873 - precision_m: 0.9876 - recall_m: 0.9871 - val_loss: 2.3112 - val_accuracy: 0.7769 - val_f1_m: 0.7769 - val_precision_m: 0.7778 - val_recall_m: 0.7764 Epoch 45/50 850/850 [==============================] - 
127s 149ms/step - loss: 0.0305 - accuracy: 0.9829 - f1_m: 0.9822 - precision_m: 0.9829 - recall_m: 0.9818 - val_loss: 2.4784 - val_accuracy: 0.7637 - val_f1_m: 0.7632 - val_precision_m: 0.7679 - val_recall_m: 0.7608 Epoch 46/50 850/850 [==============================] - 120s 142ms/step - loss: 0.0281 - accuracy: 0.9835 - f1_m: 0.9835 - precision_m: 0.9835 - recall_m: 0.9835 - val_loss: 3.6891 - val_accuracy: 0.7840 - val_f1_m: 0.7835 - val_precision_m: 0.7854 - val_recall_m: 0.7825 Epoch 47/50 850/850 [==============================] - 117s 138ms/step - loss: 0.0228 - accuracy: 0.9865 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 3.8025 - val_accuracy: 0.7797 - val_f1_m: 0.7794 - val_precision_m: 0.7816 - val_recall_m: 0.7783 Epoch 48/50 850/850 [==============================] - 110s 130ms/step - loss: 0.0296 - accuracy: 0.9841 - f1_m: 0.9845 - precision_m: 0.9853 - recall_m: 0.9841 - val_loss: 2.8577 - val_accuracy: 0.7816 - val_f1_m: 0.7813 - val_precision_m: 0.7854 - val_recall_m: 0.7792 Epoch 49/50 850/850 [==============================] - 110s 130ms/step - loss: 0.0225 - accuracy: 0.9853 - f1_m: 0.9853 - precision_m: 0.9853 - recall_m: 0.9853 - val_loss: 2.8738 - val_accuracy: 0.7807 - val_f1_m: 0.7803 - val_precision_m: 0.7816 - val_recall_m: 0.7797 Epoch 50/50 850/850 [==============================] - 112s 131ms/step - loss: 0.0273 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 4.2742 - val_accuracy: 0.7792 - val_f1_m: 0.7792 - val_precision_m: 0.7792 - val_recall_m: 0.7792
# Persist the trained NASNetMobile model for later ensembling.
model6.save('nasnet.h5')
# Record the final-epoch validation metrics. Use [-1] instead of the
# hard-coded index 49 so this still works if the epoch count is changed
# or training is interrupted (with 50 completed epochs the value is the same).
dl_acc = hist6.history["val_accuracy"][-1]
dl_prec = hist6.history["val_precision_m"][-1]
dl_rec = hist6.history["val_recall_m"][-1]
dl_f1 = hist6.history["val_f1_m"][-1]
storeResults('NASNetMobile',dl_acc,dl_prec,dl_rec,dl_f1)
# Visualize NASNetMobile training: loss on the left, accuracy on the right.
x = hist6
curves = x.history
plt.figure(figsize=(20, 10))

# Left panel: per-epoch training vs. validation loss.
plt.subplot(1, 2, 1)
plt.suptitle('Optimizer : adam', fontsize=10)
plt.ylabel('Loss', fontsize=16)
plt.plot(curves['loss'], label='Training Loss')
plt.plot(curves['val_loss'], label='Validation Loss')
plt.legend(loc='upper right')

# Right panel: per-epoch training vs. validation accuracy.
plt.subplot(1, 2, 2)
plt.ylabel('Accuracy', fontsize=16)
plt.plot(curves['accuracy'], label='Training Accuracy')
plt.plot(curves['val_accuracy'], label='Validation Accuracy')
plt.legend(loc='lower right')
plt.show()
from tensorflow.keras.models import load_model
from tensorflow.keras.layers import Average
import efficientnet.keras
def ensemble():
    """Build a simple-average ensemble of the saved Xception and
    NASNetMobile classifiers.

    Each saved model is reloaded with compile=False and rewrapped so the
    two sub-models carry distinct names, then both branches receive the
    same 256x256x3 input and their 6-way outputs are averaged.

    Returns:
        A Keras Model named 'Extension' mapping one image input to the
        averaged prediction of both sub-models.
    """
    xception = load_model('xception.h5', compile=False)
    xception = Model(inputs=xception.inputs, outputs=xception.outputs, name='Xception')

    nasnet = load_model('nasnet.h5', compile=False)
    nasnet = Model(inputs=nasnet.inputs, outputs=nasnet.outputs, name='NASNetMobile')

    shared_input = Input(shape=(256, 256, 3))
    branch_outputs = [branch(shared_input) for branch in (xception, nasnet)]
    averaged = Average()(branch_outputs)
    return Model(inputs=shared_input, outputs=averaged, name='Extension')
# Instantiate the Xception+NASNetMobile averaging ensemble and compile it.
ext = ensemble()
# NOTE(review): the ensemble is compiled with SGD (unlike the single models);
# f1_m, precision_m and recall_m are custom metric functions defined earlier
# in the notebook.
ext.compile(optimizer='sgd',
loss = 'categorical_crossentropy',
metrics=["accuracy",f1_m,precision_m, recall_m])
ext.summary()
Model: "Extension"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_2 (InputLayer) [(None, 256, 256, 3 0 []
)]
Xception (Functional) (None, 6) 20873774 ['input_2[0][0]']
NASNetMobile (Functional) (None, 6) 4276058 ['input_2[0][0]']
average_1 (Average) (None, 6) 0 ['Xception[0][0]',
'NASNetMobile[0][0]']
==================================================================================================
Total params: 25,149,832
Trainable params: 25,058,566
Non-trainable params: 91,266
__________________________________________________________________________________________________
# Fine-tune the ensemble for up to 50 epochs with LR reduction and early
# stopping (`learning_rate_reduction` and `early_stop` are callbacks defined
# earlier in the notebook); early stopping restores the best weights.
history1 = ext.fit(train_set, epochs=50, validation_data=test_set,steps_per_epoch=len(train_set), validation_steps=len(test_set),callbacks=[learning_rate_reduction, early_stop])
Epoch 1/50 850/850 [==============================] - 146s 172ms/step - loss: 0.0224 - accuracy: 0.9871 - f1_m: 0.9871 - precision_m: 0.9871 - recall_m: 0.9871 - val_loss: 0.2347 - val_accuracy: 0.9745 - val_f1_m: 0.8542 - val_precision_m: 0.9467 - val_recall_m: 0.8080 - lr: 0.0100 Epoch 2/50 850/850 [==============================] - 141s 166ms/step - loss: 0.0203 - accuracy: 0.9853 - f1_m: 0.9857 - precision_m: 0.9865 - recall_m: 0.9853 - val_loss: 0.2302 - val_accuracy: 0.9750 - val_f1_m: 0.8807 - val_precision_m: 0.9618 - val_recall_m: 0.8401 - lr: 0.0100 Epoch 3/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0197 - accuracy: 0.9859 - f1_m: 0.9859 - precision_m: 0.9859 - recall_m: 0.9859 - val_loss: 0.2442 - val_accuracy: 0.9755 - val_f1_m: 0.8642 - val_precision_m: 0.9462 - val_recall_m: 0.8231 - lr: 0.0100 Epoch 4/50 850/850 [==============================] - 144s 170ms/step - loss: 0.0197 - accuracy: 0.9865 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 0.2301 - val_accuracy: 0.9750 - val_f1_m: 0.8814 - val_precision_m: 0.9557 - val_recall_m: 0.8443 - lr: 0.0100 Epoch 5/50 850/850 [==============================] - 143s 168ms/step - loss: 0.0201 - accuracy: 0.9847 - f1_m: 0.9843 - precision_m: 0.9847 - recall_m: 0.9841 - val_loss: 0.2239 - val_accuracy: 0.9731 - val_f1_m: 0.8802 - val_precision_m: 0.9547 - val_recall_m: 0.8429 - lr: 0.0100 Epoch 6/50 850/850 [==============================] - ETA: 0s - loss: 0.0186 - accuracy: 0.9853 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 Epoch 6: ReduceLROnPlateau reducing learning rate to 0.0029999999329447745. 
850/850 [==============================] - 142s 167ms/step - loss: 0.0186 - accuracy: 0.9853 - f1_m: 0.9855 - precision_m: 0.9859 - recall_m: 0.9853 - val_loss: 0.2356 - val_accuracy: 0.9722 - val_f1_m: 0.9042 - val_precision_m: 0.9580 - val_recall_m: 0.8774 - lr: 0.0100 Epoch 7/50 850/850 [==============================] - 141s 166ms/step - loss: 0.0198 - accuracy: 0.9876 - f1_m: 0.9869 - precision_m: 0.9876 - recall_m: 0.9865 - val_loss: 0.2300 - val_accuracy: 0.9741 - val_f1_m: 0.9091 - val_precision_m: 0.9689 - val_recall_m: 0.8792 - lr: 0.0030 Epoch 8/50 850/850 [==============================] - 143s 168ms/step - loss: 0.0181 - accuracy: 0.9882 - f1_m: 0.9886 - precision_m: 0.9894 - recall_m: 0.9882 - val_loss: 0.2202 - val_accuracy: 0.9759 - val_f1_m: 0.9168 - val_precision_m: 0.9665 - val_recall_m: 0.8920 - lr: 0.0030 Epoch 9/50 850/850 [==============================] - 141s 166ms/step - loss: 0.0188 - accuracy: 0.9859 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 0.2297 - val_accuracy: 0.9745 - val_f1_m: 0.9228 - val_precision_m: 0.9703 - val_recall_m: 0.8991 - lr: 0.0030 Epoch 10/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0185 - accuracy: 0.9876 - f1_m: 0.9873 - precision_m: 0.9876 - recall_m: 0.9871 - val_loss: 0.2369 - val_accuracy: 0.9759 - val_f1_m: 0.9198 - val_precision_m: 0.9689 - val_recall_m: 0.8953 - lr: 0.0030 Epoch 11/50 850/850 [==============================] - ETA: 0s - loss: 0.0190 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 Epoch 11: ReduceLROnPlateau reducing learning rate to 0.0009000000078231095. 
850/850 [==============================] - 144s 169ms/step - loss: 0.0190 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 - val_loss: 0.2370 - val_accuracy: 0.9741 - val_f1_m: 0.9186 - val_precision_m: 0.9689 - val_recall_m: 0.8934 - lr: 0.0030 Epoch 12/50 850/850 [==============================] - 144s 170ms/step - loss: 0.0189 - accuracy: 0.9888 - f1_m: 0.9890 - precision_m: 0.9894 - recall_m: 0.9888 - val_loss: 0.2357 - val_accuracy: 0.9745 - val_f1_m: 0.9192 - val_precision_m: 0.9708 - val_recall_m: 0.8934 - lr: 9.0000e-04 Epoch 13/50 850/850 [==============================] - 144s 170ms/step - loss: 0.0181 - accuracy: 0.9871 - f1_m: 0.9871 - precision_m: 0.9871 - recall_m: 0.9871 - val_loss: 0.2229 - val_accuracy: 0.9745 - val_f1_m: 0.9215 - val_precision_m: 0.9731 - val_recall_m: 0.8958 - lr: 9.0000e-04 Epoch 14/50 850/850 [==============================] - ETA: 0s - loss: 0.0185 - accuracy: 0.9882 - f1_m: 0.9882 - precision_m: 0.9882 - recall_m: 0.9882 Epoch 14: ReduceLROnPlateau reducing learning rate to 0.00026999999536201356. 
850/850 [==============================] - 143s 169ms/step - loss: 0.0185 - accuracy: 0.9882 - f1_m: 0.9882 - precision_m: 0.9882 - recall_m: 0.9882 - val_loss: 0.2382 - val_accuracy: 0.9741 - val_f1_m: 0.9189 - val_precision_m: 0.9651 - val_recall_m: 0.8958 - lr: 9.0000e-04 Epoch 15/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0183 - accuracy: 0.9888 - f1_m: 0.9888 - precision_m: 0.9888 - recall_m: 0.9888 - val_loss: 0.2274 - val_accuracy: 0.9736 - val_f1_m: 0.9242 - val_precision_m: 0.9670 - val_recall_m: 0.9028 - lr: 2.7000e-04 Epoch 16/50 850/850 [==============================] - 141s 165ms/step - loss: 0.0185 - accuracy: 0.9900 - f1_m: 0.9900 - precision_m: 0.9900 - recall_m: 0.9900 - val_loss: 0.2328 - val_accuracy: 0.9745 - val_f1_m: 0.9241 - val_precision_m: 0.9665 - val_recall_m: 0.9028 - lr: 2.7000e-04 Epoch 17/50 850/850 [==============================] - ETA: 0s - loss: 0.0185 - accuracy: 0.9888 - f1_m: 0.9884 - precision_m: 0.9888 - recall_m: 0.9882 Epoch 17: ReduceLROnPlateau reducing learning rate to 8.099999686237424e-05. 
850/850 [==============================] - 142s 167ms/step - loss: 0.0185 - accuracy: 0.9888 - f1_m: 0.9884 - precision_m: 0.9888 - recall_m: 0.9882 - val_loss: 0.2220 - val_accuracy: 0.9755 - val_f1_m: 0.9245 - val_precision_m: 0.9708 - val_recall_m: 0.9014 - lr: 2.7000e-04 Epoch 18/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0179 - accuracy: 0.9912 - f1_m: 0.9912 - precision_m: 0.9912 - recall_m: 0.9912 - val_loss: 0.2337 - val_accuracy: 0.9731 - val_f1_m: 0.9192 - val_precision_m: 0.9623 - val_recall_m: 0.8976 - lr: 8.1000e-05 Epoch 19/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0181 - accuracy: 0.9888 - f1_m: 0.9888 - precision_m: 0.9888 - recall_m: 0.9888 - val_loss: 0.2276 - val_accuracy: 0.9736 - val_f1_m: 0.9281 - val_precision_m: 0.9703 - val_recall_m: 0.9071 - lr: 8.1000e-05 Epoch 20/50 850/850 [==============================] - ETA: 0s - loss: 0.0187 - accuracy: 0.9882 - f1_m: 0.9882 - precision_m: 0.9894 - recall_m: 0.9876 Epoch 20: ReduceLROnPlateau reducing learning rate to 2.429999949526973e-05. 
850/850 [==============================] - 144s 170ms/step - loss: 0.0187 - accuracy: 0.9882 - f1_m: 0.9882 - precision_m: 0.9894 - recall_m: 0.9876 - val_loss: 0.2330 - val_accuracy: 0.9750 - val_f1_m: 0.9332 - val_precision_m: 0.9712 - val_recall_m: 0.9142 - lr: 8.1000e-05 Epoch 21/50 850/850 [==============================] - 140s 164ms/step - loss: 0.0178 - accuracy: 0.9894 - f1_m: 0.9898 - precision_m: 0.9906 - recall_m: 0.9894 - val_loss: 0.2309 - val_accuracy: 0.9741 - val_f1_m: 0.9248 - val_precision_m: 0.9717 - val_recall_m: 0.9014 - lr: 2.4300e-05 Epoch 22/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0183 - accuracy: 0.9871 - f1_m: 0.9871 - precision_m: 0.9871 - recall_m: 0.9871 - val_loss: 0.2256 - val_accuracy: 0.9755 - val_f1_m: 0.9259 - val_precision_m: 0.9712 - val_recall_m: 0.9033 - lr: 2.4300e-05 Epoch 23/50 850/850 [==============================] - ETA: 0s - loss: 0.0188 - accuracy: 0.9865 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 Epoch 23: ReduceLROnPlateau reducing learning rate to 7.289999848580919e-06. 
850/850 [==============================] - 141s 166ms/step - loss: 0.0188 - accuracy: 0.9865 - f1_m: 0.9861 - precision_m: 0.9865 - recall_m: 0.9859 - val_loss: 0.2283 - val_accuracy: 0.9741 - val_f1_m: 0.9248 - val_precision_m: 0.9670 - val_recall_m: 0.9038 - lr: 2.4300e-05 Epoch 24/50 850/850 [==============================] - 144s 169ms/step - loss: 0.0185 - accuracy: 0.9853 - f1_m: 0.9849 - precision_m: 0.9853 - recall_m: 0.9847 - val_loss: 0.2221 - val_accuracy: 0.9726 - val_f1_m: 0.9217 - val_precision_m: 0.9642 - val_recall_m: 0.9005 - lr: 7.2900e-06 Epoch 25/50 850/850 [==============================] - 140s 164ms/step - loss: 0.0184 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 - val_loss: 0.2388 - val_accuracy: 0.9769 - val_f1_m: 0.9307 - val_precision_m: 0.9703 - val_recall_m: 0.9108 - lr: 7.2900e-06 Epoch 26/50 850/850 [==============================] - 140s 165ms/step - loss: 0.0178 - accuracy: 0.9900 - f1_m: 0.9900 - precision_m: 0.9900 - recall_m: 0.9900 - val_loss: 0.2199 - val_accuracy: 0.9736 - val_f1_m: 0.9248 - val_precision_m: 0.9689 - val_recall_m: 0.9028 - lr: 7.2900e-06 Epoch 27/50 850/850 [==============================] - 142s 167ms/step - loss: 0.0181 - accuracy: 0.9882 - f1_m: 0.9880 - precision_m: 0.9888 - recall_m: 0.9876 - val_loss: 0.2214 - val_accuracy: 0.9745 - val_f1_m: 0.9272 - val_precision_m: 0.9703 - val_recall_m: 0.9057 - lr: 7.2900e-06 Epoch 28/50 850/850 [==============================] - ETA: 0s - loss: 0.0196 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 Epoch 28: ReduceLROnPlateau reducing learning rate to 2.186999927289435e-06. Restoring model weights from the end of the best epoch: 8. 
850/850 [==============================] - 140s 165ms/step - loss: 0.0196 - accuracy: 0.9876 - f1_m: 0.9876 - precision_m: 0.9876 - recall_m: 0.9876 - val_loss: 0.2237 - val_accuracy: 0.9736 - val_f1_m: 0.9291 - val_precision_m: 0.9684 - val_recall_m: 0.9094 - lr: 7.2900e-06 Epoch 28: early stopping
# Record the ensemble's final-epoch validation metrics. Use [-1] rather than
# the hard-coded index 27: early stopping means the number of completed
# epochs is not known in advance, so a fixed index would break (or silently
# read the wrong epoch) on any rerun. Here the run stopped after 28 epochs,
# so [-1] reads the same entry as [27] did.
dl_acc = history1.history["val_accuracy"][-1]
dl_prec = history1.history["val_precision_m"][-1]
dl_rec = history1.history["val_recall_m"][-1]
dl_f1 = history1.history["val_f1_m"][-1]
storeResults('Xception+NASNetMobile',dl_acc,dl_prec,dl_rec,dl_f1)
# Visualize ensemble training: loss on the left, accuracy on the right.
x = history1
plt.figure(figsize=(20, 10))
plt.subplot(1, 2, 1)
# Fix: the ensemble is compiled with optimizer='sgd' (see ext.compile),
# so the figure title previously claiming 'adam' was wrong.
plt.suptitle('Optimizer : sgd', fontsize=10)
plt.ylabel('Loss', fontsize=16)
plt.plot(x.history['loss'], label='Training Loss')
plt.plot(x.history['val_loss'], label='Validation Loss')
plt.legend(loc='upper right')
plt.subplot(1, 2, 2)
plt.ylabel('Accuracy', fontsize=16)
plt.plot(x.history['accuracy'], label='Training Accuracy')
plt.plot(x.history['val_accuracy'], label='Validation Accuracy')
plt.legend(loc='lower right')
plt.show()
# Assemble the per-model evaluation metrics into one summary table.
import pandas as pd

summary_columns = {
    'ML Model': ML_Model,
    'Accuracy': accuracy,
    'Precision': precision,
    'Recall': recall,
    'F1-Score': f1score,
}
result = pd.DataFrame(summary_columns)
result
| ML Model | Accuracy | Precision | Recall | F1-Score | |
|---|---|---|---|---|---|
| 0 | CNN | 0.946 | 0.947 | 0.946 | 0.946 |
| 1 | MobileNet | 0.941 | 0.944 | 0.937 | 0.940 |
| 2 | Xception | 0.973 | 0.973 | 0.973 | 0.973 |
| 3 | NASNetMobile | 0.779 | 0.779 | 0.779 | 0.779 |
| 4 | Xception+NASNetMobile | 0.974 | 0.968 | 0.909 | 0.929 |
# Persist the ensemble, then draw one horizontal bar chart per metric.
ext.save('ensemble.h5')
classifier = ML_Model
y_pos = np.arange(len(classifier))
import matplotlib.pyplot as plt2

def _plot_metric_bars(scores, xlabel, color):
    """Draw a horizontal bar chart of one score per classifier."""
    plt2.barh(y_pos, scores, align='center', alpha=0.5, color=color)
    plt2.yticks(y_pos, classifier)
    plt2.xlabel(xlabel)
    plt2.title('Classification Performance')
    plt2.show()

# One call per metric replaces the four copy-pasted plotting stanzas;
# labels and colors match the originals exactly.
_plot_metric_bars(accuracy, 'Accuracy Score', 'navy')
_plot_metric_bars(precision, 'Precision Score', 'red')
_plot_metric_bars(recall, 'Recall Score', 'green')
_plot_metric_bars(f1score, 'F1 Score', 'brown')